diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index fd62bf94353..a2f25647475 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -2,7 +2,7 @@ blank_issues_enabled: false version: 2.1 contact_links: - name: πŸ“š Documentation - url: https://github.com/langchain-ai/docs/issues/new?template=langchain.yml + url: https://github.com/langchain-ai/docs/issues/new?template=01-langchain.yml about: Report an issue related to the LangChain documentation - name: πŸ’¬ LangChain Forum url: https://forum.langchain.com/ diff --git a/.github/actions/poetry_setup/action.yml b/.github/actions/poetry_setup/action.yml deleted file mode 100644 index 68b099f0e55..00000000000 --- a/.github/actions/poetry_setup/action.yml +++ /dev/null @@ -1,93 +0,0 @@ -# An action for setting up poetry install with caching. -# Using a custom action since the default action does not -# take poetry install groups into account. -# Action code from: -# https://github.com/actions/setup-python/issues/505#issuecomment-1273013236 -name: poetry-install-with-caching -description: Poetry install with support for caching of dependency groups. - -inputs: - python-version: - description: Python version, supporting MAJOR.MINOR only - required: true - - poetry-version: - description: Poetry version - required: true - - cache-key: - description: Cache key to use for manual handling of caching - required: true - - working-directory: - description: Directory whose poetry.lock file should be cached - required: true - -runs: - using: composite - steps: - - uses: actions/setup-python@v5 - name: Setup python ${{ inputs.python-version }} - id: setup-python - with: - python-version: ${{ inputs.python-version }} - - - uses: actions/cache@v4 - id: cache-bin-poetry - name: Cache Poetry binary - Python ${{ inputs.python-version }} - env: - SEGMENT_DOWNLOAD_TIMEOUT_MIN: "1" - with: - path: | - /opt/pipx/venvs/poetry - # This step caches the poetry installation, so make sure it's keyed on the poetry version as well. - key: bin-poetry-${{ runner.os }}-${{ runner.arch }}-py-${{ inputs.python-version }}-${{ inputs.poetry-version }} - - - name: Refresh shell hashtable and fixup softlinks - if: steps.cache-bin-poetry.outputs.cache-hit == 'true' - shell: bash - env: - POETRY_VERSION: ${{ inputs.poetry-version }} - PYTHON_VERSION: ${{ inputs.python-version }} - run: | - set -eux - - # Refresh the shell hashtable, to ensure correct `which` output. - hash -r - - # `actions/cache@v3` doesn't always seem able to correctly unpack softlinks. - # Delete and recreate the softlinks pipx expects to have. - rm /opt/pipx/venvs/poetry/bin/python - cd /opt/pipx/venvs/poetry/bin - ln -s "$(which "python$PYTHON_VERSION")" python - chmod +x python - cd /opt/pipx_bin/ - ln -s /opt/pipx/venvs/poetry/bin/poetry poetry - chmod +x poetry - - # Ensure everything got set up correctly. - /opt/pipx/venvs/poetry/bin/python --version - /opt/pipx_bin/poetry --version - - - name: Install poetry - if: steps.cache-bin-poetry.outputs.cache-hit != 'true' - shell: bash - env: - POETRY_VERSION: ${{ inputs.poetry-version }} - PYTHON_VERSION: ${{ inputs.python-version }} - # Install poetry using the python version installed by setup-python step. - run: pipx install "poetry==$POETRY_VERSION" --python '${{ steps.setup-python.outputs.python-path }}' --verbose - - - name: Restore pip and poetry cached dependencies - uses: actions/cache@v4 - env: - SEGMENT_DOWNLOAD_TIMEOUT_MIN: "4" - WORKDIR: ${{ inputs.working-directory == '' && '.' 
|| inputs.working-directory }} - with: - path: | - ~/.cache/pip - ~/.cache/pypoetry/virtualenvs - ~/.cache/pypoetry/cache - ~/.cache/pypoetry/artifacts - ${{ env.WORKDIR }}/.venv - key: py-deps-${{ runner.os }}-${{ runner.arch }}-py-${{ inputs.python-version }}-poetry-${{ inputs.poetry-version }}-${{ inputs.cache-key }}-${{ hashFiles(format('{0}/**/poetry.lock', env.WORKDIR)) }} diff --git a/.github/pr-title-labeler.yml b/.github/pr-title-labeler.yml deleted file mode 100644 index 24aedf6dcea..00000000000 --- a/.github/pr-title-labeler.yml +++ /dev/null @@ -1,41 +0,0 @@ -# PR title labeler config -# -# Labels PRs based on conventional commit patterns in titles -# -# Format: type(scope): description or type!: description (breaking) - -add-missing-labels: true -clear-prexisting: false -include-commits: false -include-title: true -label-for-breaking-changes: breaking - -label-mapping: - documentation: ["docs"] - feature: ["feat"] - fix: ["fix"] - infra: ["build", "ci", "chore"] - integration: - [ - "anthropic", - "chroma", - "deepseek", - "exa", - "fireworks", - "groq", - "huggingface", - "mistralai", - "nomic", - "ollama", - "openai", - "perplexity", - "prompty", - "qdrant", - "xai", - ] - linting: ["style"] - performance: ["perf"] - refactor: ["refactor"] - release: ["release"] - revert: ["revert"] - tests: ["test"] diff --git a/.github/scripts/check_diff.py b/.github/scripts/check_diff.py index e57b715166c..6eed89940e7 100644 --- a/.github/scripts/check_diff.py +++ b/.github/scripts/check_diff.py @@ -30,6 +30,7 @@ LANGCHAIN_DIRS = [ "libs/text-splitters", "libs/langchain", "libs/langchain_v1", + "libs/model-profiles", ] # When set to True, we are ignoring core dependents @@ -130,29 +131,20 @@ def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]: return _get_pydantic_test_configs(dir_) if job == "codspeed": - py_versions = ["3.12"] # 3.13 is not yet supported + py_versions = ["3.13"] elif dir_ == "libs/core": - py_versions = ["3.10", "3.11", "3.12", "3.13"] + py_versions = ["3.10", "3.11", "3.12", "3.13", "3.14"] # custom logic for specific directories - - elif dir_ == "libs/langchain" and job == "extended-tests": + elif dir_ in {"libs/partners/chroma"}: py_versions = ["3.10", "3.13"] - elif dir_ == "libs/langchain_v1": - py_versions = ["3.10", "3.13"] - elif dir_ in {"libs/cli"}: - py_versions = ["3.10", "3.13"] - - elif dir_ == ".": - # unable to install with 3.13 because tokenizers doesn't support 3.13 yet - py_versions = ["3.10", "3.12"] else: - py_versions = ["3.10", "3.13"] + py_versions = ["3.10", "3.14"] return [{"working-directory": dir_, "python-version": py_v} for py_v in py_versions] def _get_pydantic_test_configs( - dir_: str, *, python_version: str = "3.11" + dir_: str, *, python_version: str = "3.12" ) -> List[Dict[str, str]]: with open("./libs/core/uv.lock", "rb") as f: core_uv_lock_data = tomllib.load(f) @@ -306,7 +298,9 @@ if __name__ == "__main__": if not filename.startswith(".") ] != ["README.md"]: dirs_to_run["test"].add(f"libs/partners/{partner_dir}") - dirs_to_run["codspeed"].add(f"libs/partners/{partner_dir}") + # Skip codspeed for partners without benchmarks or in IGNORED_PARTNERS + if partner_dir not in IGNORED_PARTNERS: + dirs_to_run["codspeed"].add(f"libs/partners/{partner_dir}") # Skip if the directory was deleted or is just a tombstone readme elif file.startswith("libs/"): # Check if this is a root-level file in libs/ (e.g., libs/README.md) diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml index 
dc50158743b..86a089b8b24 100644 --- a/.github/workflows/_release.yml +++ b/.github/workflows/_release.yml @@ -77,7 +77,7 @@ jobs: working-directory: ${{ inputs.working-directory }} - name: Upload build - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: dist path: ${{ inputs.working-directory }}/dist/ @@ -149,8 +149,8 @@ jobs: fi fi - # if PREV_TAG is empty, let it be empty - if [ -z "$PREV_TAG" ]; then + # if PREV_TAG is empty or came out to 0.0.0, let it be empty + if [ -z "$PREV_TAG" ] || [ "$PREV_TAG" = "$PKG_NAME==0.0.0" ]; then echo "No previous tag found - first release" else # confirm prev-tag actually exists in git repo with git tag @@ -179,8 +179,8 @@ jobs: PREV_TAG: ${{ steps.check-tags.outputs.prev-tag }} run: | PREAMBLE="Changes since $PREV_TAG" - # if PREV_TAG is empty, then we are releasing the first version - if [ -z "$PREV_TAG" ]; then + # if PREV_TAG is empty or 0.0.0, then we are releasing the first version + if [ -z "$PREV_TAG" ] || [ "$PREV_TAG" = "$PKG_NAME==0.0.0" ]; then PREAMBLE="Initial release" PREV_TAG=$(git rev-list --max-parents=0 HEAD) fi @@ -208,7 +208,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@v6 with: name: dist path: ${{ inputs.working-directory }}/dist/ @@ -258,7 +258,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@v6 with: name: dist path: ${{ inputs.working-directory }}/dist/ @@ -377,6 +377,7 @@ jobs: XAI_API_KEY: ${{ secrets.XAI_API_KEY }} DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }} PPLX_API_KEY: ${{ secrets.PPLX_API_KEY }} + LANGCHAIN_TESTS_USER_AGENT: ${{ secrets.LANGCHAIN_TESTS_USER_AGENT }} run: make integration_tests working-directory: ${{ inputs.working-directory }} @@ -395,7 +396,7 @@ jobs: contents: read strategy: matrix: - partner: [openai, anthropic] + partner: [openai] fail-fast: false # Continue testing other partners if one fails env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} @@ -409,6 +410,7 @@ jobs: AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME }} AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }} AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }} + LANGCHAIN_TESTS_USER_AGENT: ${{ secrets.LANGCHAIN_TESTS_USER_AGENT }} steps: - uses: actions/checkout@v5 @@ -428,7 +430,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@v6 if: startsWith(inputs.working-directory, 'libs/core') with: name: dist @@ -442,7 +444,7 @@ jobs: git ls-remote --tags origin "langchain-${{ matrix.partner }}*" \ | awk '{print $2}' \ | sed 's|refs/tags/||' \ - | grep -E '[0-9]+\.[0-9]+\.[0-9]+([a-zA-Z]+[0-9]+)?$' \ + | grep -E '[0-9]+\.[0-9]+\.[0-9]+$' \ | sort -Vr \ | head -n 1 )" @@ -497,7 +499,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@v6 with: name: dist path: ${{ inputs.working-directory }}/dist/ @@ -537,7 +539,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@v6 with: name: dist path: ${{ inputs.working-directory }}/dist/ diff --git a/.github/workflows/_test_pydantic.yml b/.github/workflows/_test_pydantic.yml index 5c9211ded51..8fdac9475f7 100644 --- 
a/.github/workflows/_test_pydantic.yml +++ b/.github/workflows/_test_pydantic.yml @@ -13,7 +13,7 @@ on: required: false type: string description: "Python version to use" - default: "3.11" + default: "3.12" pydantic-version: required: true type: string @@ -51,7 +51,9 @@ jobs: - name: "πŸ”„ Install Specific Pydantic Version" shell: bash - run: VIRTUAL_ENV=.venv uv pip install pydantic~=${{ inputs.pydantic-version }} + env: + PYDANTIC_VERSION: ${{ inputs.pydantic-version }} + run: VIRTUAL_ENV=.venv uv pip install "pydantic~=$PYDANTIC_VERSION" - name: "πŸ§ͺ Run Core Tests" shell: bash diff --git a/.github/workflows/check_diffs.yml b/.github/workflows/check_diffs.yml index 3609e6e6379..55de7f9576c 100644 --- a/.github/workflows/check_diffs.yml +++ b/.github/workflows/check_diffs.yml @@ -184,15 +184,14 @@ jobs: steps: - uses: actions/checkout@v5 - # We have to use 3.12 as 3.13 is not yet supported - name: "πŸ“¦ Install UV Package Manager" - uses: astral-sh/setup-uv@v6 + uses: astral-sh/setup-uv@v7 with: - python-version: "3.12" + python-version: "3.13" - uses: actions/setup-python@v6 with: - python-version: "3.12" + python-version: "3.13" - name: "πŸ“¦ Install Test Dependencies" run: uv sync --group test diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml index e7cd2be5599..fda2d707a4d 100644 --- a/.github/workflows/integration_tests.yml +++ b/.github/workflows/integration_tests.yml @@ -23,10 +23,8 @@ permissions: contents: read env: - POETRY_VERSION: "1.8.4" UV_FROZEN: "true" DEFAULT_LIBS: '["libs/partners/openai", "libs/partners/anthropic", "libs/partners/fireworks", "libs/partners/groq", "libs/partners/mistralai", "libs/partners/xai", "libs/partners/google-vertexai", "libs/partners/google-genai", "libs/partners/aws"]' - POETRY_LIBS: ("libs/partners/aws") jobs: # Generate dynamic test matrix based on input parameters or defaults @@ -60,7 +58,6 @@ jobs: echo $matrix echo "matrix=$matrix" >> $GITHUB_OUTPUT # Run integration tests against partner libraries with live API credentials - # Tests are run with Poetry or UV depending on the library's setup build: if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule' name: "🐍 Python ${{ matrix.python-version }}: ${{ matrix.working-directory }}" @@ -95,17 +92,7 @@ jobs: mv langchain-google/libs/vertexai langchain/libs/partners/google-vertexai mv langchain-aws/libs/aws langchain/libs/partners/aws - - name: "🐍 Set up Python ${{ matrix.python-version }} + Poetry" - if: contains(env.POETRY_LIBS, matrix.working-directory) - uses: "./langchain/.github/actions/poetry_setup" - with: - python-version: ${{ matrix.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - working-directory: langchain/${{ matrix.working-directory }} - cache-key: scheduled - - name: "🐍 Set up Python ${{ matrix.python-version }} + UV" - if: "!contains(env.POETRY_LIBS, matrix.working-directory)" uses: "./langchain/.github/actions/uv_setup" with: python-version: ${{ matrix.python-version }} @@ -123,15 +110,7 @@ jobs: aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: ${{ secrets.AWS_REGION }} - - name: "πŸ“¦ Install Dependencies (Poetry)" - if: contains(env.POETRY_LIBS, matrix.working-directory) - run: | - echo "Running scheduled tests, installing dependencies with poetry..." 
- cd langchain/${{ matrix.working-directory }} - poetry install --with=test_integration,test - - - name: "πŸ“¦ Install Dependencies (UV)" - if: "!contains(env.POETRY_LIBS, matrix.working-directory)" + - name: "πŸ“¦ Install Dependencies" run: | echo "Running scheduled tests, installing dependencies with uv..." cd langchain/${{ matrix.working-directory }} @@ -176,6 +155,7 @@ jobs: WATSONX_APIKEY: ${{ secrets.WATSONX_APIKEY }} WATSONX_PROJECT_ID: ${{ secrets.WATSONX_PROJECT_ID }} XAI_API_KEY: ${{ secrets.XAI_API_KEY }} + LANGCHAIN_TESTS_USER_AGENT: ${{ secrets.LANGCHAIN_TESTS_USER_AGENT }} run: | cd langchain/${{ matrix.working-directory }} make integration_tests diff --git a/.github/workflows/pr_lint.yml b/.github/workflows/pr_lint.yml index 1f78c49e88e..1eea1fdd3c4 100644 --- a/.github/workflows/pr_lint.yml +++ b/.github/workflows/pr_lint.yml @@ -27,10 +27,10 @@ # * release β€” prepare a new release # # Allowed Scopes (optional): -# core, cli, langchain, langchain_v1, langchain_legacy, standard-tests, +# core, cli, langchain, langchain_v1, langchain-classic, standard-tests, # text-splitters, docs, anthropic, chroma, deepseek, exa, fireworks, groq, # huggingface, mistralai, nomic, ollama, openai, perplexity, prompty, qdrant, -# xai, infra +# xai, infra, deps # # Rules: # 1. The 'Type' must start with a lowercase letter. @@ -79,8 +79,8 @@ jobs: core cli langchain - langchain_v1 - langchain_legacy + langchain_classic + model-profiles standard-tests text-splitters docs diff --git a/.gitignore b/.gitignore index a7be7f22261..16656d5b27f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ .vs/ .claude/ .idea/ +#Emacs backup +*~ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 00000000000..ff1bd5ca296 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "docs-langchain": { + "type": "http", + "url": "https://docs.langchain.com/mcp" + } + } +} \ No newline at end of file diff --git a/AGENTS.md b/AGENTS.md index ee61a460a41..1e2b420aeff 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -152,20 +152,22 @@ def send_email(to: str, msg: str, *, priority: str = "normal") -> bool: priority: Email priority level (`'low'`, `'normal'`, `'high'`). Returns: - True if email was sent successfully, False otherwise. + `True` if email was sent successfully, `False` otherwise. Raises: - InvalidEmailError: If the email address format is invalid. - SMTPConnectionError: If unable to connect to email server. + `InvalidEmailError`: If the email address format is invalid. + `SMTPConnectionError`: If unable to connect to email server. """ ``` **Documentation Guidelines:** - Types go in function signatures, NOT in docstrings + - If a default is present, DO NOT repeat it in the docstring unless there is post-processing or it is set conditionally. - Focus on "why" rather than "what" in descriptions - Document all parameters, return values, and exceptions - Keep descriptions concise but clear +- Ensure American English spelling (e.g., "behavior", not "behaviour") πŸ“Œ *Tip:* Keep descriptions concise but clear. Only document return values if non-obvious. diff --git a/CLAUDE.md b/CLAUDE.md index ee61a460a41..1e2b420aeff 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -152,20 +152,22 @@ def send_email(to: str, msg: str, *, priority: str = "normal") -> bool: priority: Email priority level (`'low'`, `'normal'`, `'high'`). Returns: - True if email was sent successfully, False otherwise. + `True` if email was sent successfully, `False` otherwise. 
Raises: - InvalidEmailError: If the email address format is invalid. - SMTPConnectionError: If unable to connect to email server. + `InvalidEmailError`: If the email address format is invalid. + `SMTPConnectionError`: If unable to connect to email server. """ ``` **Documentation Guidelines:** - Types go in function signatures, NOT in docstrings + - If a default is present, DO NOT repeat it in the docstring unless there is post-processing or it is set conditionally. - Focus on "why" rather than "what" in descriptions - Document all parameters, return values, and exceptions - Keep descriptions concise but clear +- Ensure American English spelling (e.g., "behavior", not "behaviour") πŸ“Œ *Tip:* Keep descriptions concise but clear. Only document return values if non-obvious. diff --git a/MIGRATE.md b/MIGRATE.md index 29ca5e599ec..b94475225cf 100644 --- a/MIGRATE.md +++ b/MIGRATE.md @@ -2,6 +2,7 @@ Please see the following guides for migrating LangChain code: +* Migrate to [LangChain v1.0](https://docs.langchain.com/oss/python/migrate/langchain-v1) * Migrate to [LangChain v0.3](https://python.langchain.com/docs/versions/v0_3/) * Migrate to [LangChain v0.2](https://python.langchain.com/docs/versions/v0_2/) * Migrating from [LangChain 0.0.x Chains](https://python.langchain.com/docs/versions/migrating_chains/) diff --git a/README.md b/README.md index 3c8c8404ebe..ab881072bf8 100644 --- a/README.md +++ b/README.md @@ -12,13 +12,16 @@

- PyPI - License + PyPI - License - + PyPI - Downloads + + Version + - Open in Dev Containers + Open in Dev Containers Open in Github Codespace @@ -31,17 +34,22 @@

-LangChain is a framework for building LLM-powered applications. It helps you chain together interoperable components and third-party integrations to simplify AI application development β€” all while future-proofing decisions as the underlying technology evolves. +LangChain is a framework for building agents and LLM-powered applications. It helps you chain together interoperable components and third-party integrations to simplify AI application development – all while future-proofing decisions as the underlying technology evolves. ```bash -pip install -U langchain +pip install langchain ``` +If you're looking for more advanced customization or agent orchestration, check out [LangGraph](https://docs.langchain.com/oss/python/langgraph/overview), our framework for building controllable agent workflows. + --- -**Documentation**: To learn more about LangChain, check out [the docs](https://docs.langchain.com/). +**Documentation**: -If you're looking for more advanced customization or agent orchestration, check out [LangGraph](https://langchain-ai.github.io/langgraph/), our framework for building controllable agent workflows. +- [docs.langchain.com](https://docs.langchain.com/oss/python/langchain/overview) – Comprehensive documentation, including conceptual overviews and guides +- [reference.langchain.com/python](https://reference.langchain.com/python) – API reference docs for LangChain packages + +**Discussions**: Visit the [LangChain Forum](https://forum.langchain.com) to connect with the community and share all of your technical questions, ideas, and feedback. > [!NOTE] > Looking for the JS/TS library? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). @@ -52,26 +60,27 @@ LangChain helps developers build applications powered by LLMs through a standard Use LangChain for: -- **Real-time data augmentation**. Easily connect LLMs to diverse data sources and external/internal systems, drawing from LangChain’s vast library of integrations with model providers, tools, vector stores, retrievers, and more. -- **Model interoperability**. Swap models in and out as your engineering team experiments to find the best choice for your application’s needs. As the industry frontier evolves, adapt quickly β€” LangChain’s abstractions keep you moving without losing momentum. +- **Real-time data augmentation**. Easily connect LLMs to diverse data sources and external/internal systems, drawing from LangChain's vast library of integrations with model providers, tools, vector stores, retrievers, and more. +- **Model interoperability**. Swap models in and out as your engineering team experiments to find the best choice for your application's needs. As the industry frontier evolves, adapt quickly – LangChain's abstractions keep you moving without losing momentum. +- **Rapid prototyping**. Quickly build and iterate on LLM applications with LangChain's modular, component-based architecture. Test different approaches and workflows without rebuilding from scratch, accelerating your development cycle. +- **Production-ready features**. Deploy reliable applications with built-in support for monitoring, evaluation, and debugging through integrations like LangSmith. Scale with confidence using battle-tested patterns and best practices. +- **Vibrant community and ecosystem**. Leverage a rich ecosystem of integrations, templates, and community-contributed components. Benefit from continuous improvements and stay up-to-date with the latest AI developments through an active open-source community. 
+- **Flexible abstraction layers**. Work at the level of abstraction that suits your needs - from high-level chains for quick starts to low-level components for fine-grained control. LangChain grows with your application's complexity. -## LangChain’s ecosystem +## LangChain ecosystem While the LangChain framework can be used standalone, it also integrates seamlessly with any LangChain product, giving developers a full suite of tools when building LLM applications. To improve your LLM application development, pair LangChain with: -- [LangSmith](https://www.langchain.com/langsmith) - Helpful for agent evals and observability. Debug poor-performing LLM app runs, evaluate agent trajectories, gain visibility in production, and improve performance over time. -- [LangGraph](https://langchain-ai.github.io/langgraph/) - Build agents that can reliably handle complex tasks with LangGraph, our low-level agent orchestration framework. LangGraph offers customizable architecture, long-term memory, and human-in-the-loop workflows β€” and is trusted in production by companies like LinkedIn, Uber, Klarna, and GitLab. -- [LangGraph Platform](https://docs.langchain.com/langgraph-platform) - Deploy and scale agents effortlessly with a purpose-built deployment platform for long-running, stateful workflows. Discover, reuse, configure, and share agents across teams β€” and iterate quickly with visual prototyping in [LangGraph Studio](https://langchain-ai.github.io/langgraph/concepts/langgraph_studio/). +- [LangGraph](https://docs.langchain.com/oss/python/langgraph/overview) – Build agents that can reliably handle complex tasks with LangGraph, our low-level agent orchestration framework. LangGraph offers customizable architecture, long-term memory, and human-in-the-loop workflows – and is trusted in production by companies like LinkedIn, Uber, Klarna, and GitLab. +- [Integrations](https://docs.langchain.com/oss/python/integrations/providers/overview) – List of LangChain integrations, including chat & embedding models, tools & toolkits, and more +- [LangSmith](https://www.langchain.com/langsmith) – Helpful for agent evals and observability. Debug poor-performing LLM app runs, evaluate agent trajectories, gain visibility in production, and improve performance over time. +- [LangSmith Deployment](https://docs.langchain.com/langsmith/deployments) – Deploy and scale agents effortlessly with a purpose-built deployment platform for long-running, stateful workflows. Discover, reuse, configure, and share agents across teams – and iterate quickly with visual prototyping in [LangSmith Studio](https://docs.langchain.com/langsmith/studio). +- [Deep Agents](https://github.com/langchain-ai/deepagents) *(new!)* – Build agents that can plan, use subagents, and leverage file systems for complex tasks ## Additional resources -- [Conceptual Guides](https://docs.langchain.com/oss/python/langchain/overview): Explanations of key -concepts behind the LangChain framework. -- [Tutorials](https://docs.langchain.com/oss/python/learn): Simple walkthroughs with -guided examples on getting started with LangChain. -- [API Reference](https://reference.langchain.com/python/): Detailed reference on -navigating base packages and integrations for LangChain. -- [LangChain Forum](https://forum.langchain.com/): Connect with the community and share all of your technical questions, ideas, and feedback. -- [Chat LangChain](https://chat.langchain.com/): Ask questions & chat with our documentation. 
+- [API Reference](https://reference.langchain.com/python) – Detailed reference on navigating base packages and integrations for LangChain. +- [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview) – Learn how to contribute to LangChain projects and find good first issues. +- [Code of Conduct](https://github.com/langchain-ai/langchain/blob/master/.github/CODE_OF_CONDUCT.md) – Our community guidelines and standards for participation. diff --git a/SECURITY.md b/SECURITY.md index 1af4746f78a..c35d9342194 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -55,10 +55,10 @@ All out of scope targets defined by huntr as well as: * **langchain-experimental**: This repository is for experimental code and is not eligible for bug bounties (see [package warning](https://pypi.org/project/langchain-experimental/)), bug reports to it will be marked as interesting or waste of time and published with no bounty attached. -* **tools**: Tools in either langchain or langchain-community are not eligible for bug +* **tools**: Tools in either `langchain` or `langchain-community` are not eligible for bug bounties. This includes the following directories - * libs/langchain/langchain/tools - * libs/community/langchain_community/tools + * `libs/langchain/langchain/tools` + * `libs/community/langchain_community/tools` * Please review the [Best Practices](#best-practices) for more details, but generally tools interact with the real world. Developers are expected to understand the security implications of their code and are responsible diff --git a/libs/cli/README.md b/libs/cli/README.md index f86c6ef69d4..7c29748e954 100644 --- a/libs/cli/README.md +++ b/libs/cli/README.md @@ -1,6 +1,30 @@ # langchain-cli -This package implements the official CLI for LangChain. Right now, it is most useful -for getting started with LangChain Templates! +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-cli?label=%20)](https://pypi.org/project/langchain-cli/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-cli)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-cli)](https://pypistats.org/packages/langchain-cli) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +## Quick Install + +```bash +pip install langchain-cli +``` + +## πŸ€” What is this? + +This package implements the official CLI for LangChain. Right now, it is most useful for getting started with LangChain Templates! + +## πŸ“– Documentation [CLI Docs](https://github.com/langchain-ai/langchain/blob/master/libs/cli/DOCS.md) + +## πŸ“• Releases & Versioning + +See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning](https://docs.langchain.com/oss/python/versioning) policies. + +## πŸ’ Contributing + +As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. + +For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview). 
diff --git a/libs/cli/langchain_cli/integration_template/README.md b/libs/cli/langchain_cli/integration_template/README.md index f8d70df8005..15741c62f85 100644 --- a/libs/cli/langchain_cli/integration_template/README.md +++ b/libs/cli/langchain_cli/integration_template/README.md @@ -19,8 +19,8 @@ And you should configure credentials by setting the following environment variab ```python from __module_name__ import Chat__ModuleName__ -llm = Chat__ModuleName__() -llm.invoke("Sing a ballad of LangChain.") +model = Chat__ModuleName__() +model.invoke("Sing a ballad of LangChain.") ``` ## Embeddings @@ -41,6 +41,6 @@ embeddings.embed_query("What is the meaning of life?") ```python from __module_name__ import __ModuleName__LLM -llm = __ModuleName__LLM() -llm.invoke("The meaning of life is") +model = __ModuleName__LLM() +model.invoke("The meaning of life is") ``` diff --git a/libs/cli/langchain_cli/integration_template/docs/chat.ipynb b/libs/cli/langchain_cli/integration_template/docs/chat.ipynb index 86221b5fc63..32f4cb8e1b0 100644 --- a/libs/cli/langchain_cli/integration_template/docs/chat.ipynb +++ b/libs/cli/langchain_cli/integration_template/docs/chat.ipynb @@ -1,262 +1,264 @@ { - "cells": [ - { - "cell_type": "raw", - "id": "afaf8039", - "metadata": {}, - "source": [ - "---\n", - "sidebar_label: __ModuleName__\n", - "---" - ] + "cells": [ + { + "cell_type": "raw", + "id": "afaf8039", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: __ModuleName__\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "e49f1e0d", + "metadata": {}, + "source": [ + "# Chat__ModuleName__\n", + "\n", + "- TODO: Make sure API reference link is correct.\n", + "\n", + "This will help you get started with __ModuleName__ [chat models](/docs/concepts/chat_models). For detailed documentation of all Chat__ModuleName__ features and configurations head to the [API reference](https://python.langchain.com/api_reference/__package_name_short_snake__/chat_models/__module_name__.chat_models.Chat__ModuleName__.html).\n", + "\n", + "- TODO: Add any other relevant links, like information about models, prices, context windows, etc. 
See https://python.langchain.com/docs/integrations/chat/openai/ for an example.\n", + "\n", + "## Overview\n", + "### Integration details\n", + "\n", + "- TODO: Fill in table features.\n", + "- TODO: Remove JS support link if not relevant, otherwise ensure link is correct.\n", + "- TODO: Make sure API reference links are correct.\n", + "\n", + "| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/docs/integrations/chat/__package_name_short_snake__) | Package downloads | Package latest |\n", + "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", + "| [Chat__ModuleName__](https://python.langchain.com/api_reference/__package_name_short_snake__/chat_models/__module_name__.chat_models.Chat__ModuleName__.html) | [__package_name__](https://python.langchain.com/api_reference/__package_name_short_snake__/) | βœ…/❌ | beta/❌ | βœ…/❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/__package_name__&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/__package_name__&label=%20) |\n", + "\n", + "### Model features\n", + "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", + "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n", + "| βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ |\n", + "\n", + "## Setup\n", + "\n", + "- TODO: Update with relevant info.\n", + "\n", + "To access __ModuleName__ models you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name__` integration package.\n", + "\n", + "### Credentials\n", + "\n", + "- TODO: Update with relevant info.\n", + "\n", + "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key. 
Once you've done this set the __MODULE_NAME___API_KEY environment variable:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "433e8d2b-9519-4b49-b2c4-7ab65b046c94", + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if not os.getenv(\"__MODULE_NAME___API_KEY\"):\n", + " os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\n", + " \"Enter your __ModuleName__ API key: \"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "72ee0c4b-9764-423a-9dbf-95129e185210", + "metadata": {}, + "source": [ + "To enable automated tracing of your model calls, set your [LangSmith](https://docs.smith.langchain.com/) API key:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a15d341e-3e26-4ca3-830b-5aab30ed66de", + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n", + "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")" + ] + }, + { + "cell_type": "markdown", + "id": "0730d6a1-c893-4840-9817-5e5251676d5d", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "The LangChain __ModuleName__ integration lives in the `__package_name__` package:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "652d6238-1f87-422a-b135-f5abbb8652fc", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU __package_name__" + ] + }, + { + "cell_type": "markdown", + "id": "a38cde65-254d-4219-a441-068766c0d4b5", + "metadata": {}, + "source": [ + "## Instantiation\n", + "\n", + "Now we can instantiate our model object and generate chat completions:\n", + "\n", + "- TODO: Update model instantiation with relevant params." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae", + "metadata": {}, + "outputs": [], + "source": [ + "from __module_name__ import Chat__ModuleName__\n", + "\n", + "model = Chat__ModuleName__(\n", + " model=\"model-name\",\n", + " temperature=0,\n", + " max_tokens=None,\n", + " timeout=None,\n", + " max_retries=2,\n", + " # other params...\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "2b4f3e15", + "metadata": {}, + "source": [ + "## Invocation\n", + "\n", + "- TODO: Run cells so output can be seen." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "62e0dbc3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "messages = [\n", + " (\n", + " \"system\",\n", + " \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n", + " ),\n", + " (\"human\", \"I love programming.\"),\n", + "]\n", + "ai_msg = model.invoke(messages)\n", + "ai_msg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d86145b3-bfef-46e8-b227-4dda5c9c2705", + "metadata": {}, + "outputs": [], + "source": [ + "print(ai_msg.content)" + ] + }, + { + "cell_type": "markdown", + "id": "18e2bfc0-7e78-4528-a73f-499ac150dca8", + "metadata": {}, + "source": [ + "## Chaining\n", + "\n", + "We can [chain](/docs/how_to/sequence/) our model with a prompt template like so:\n", + "\n", + "- TODO: Run cells so output can be seen." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.prompts import ChatPromptTemplate\n", + "\n", + "prompt = ChatPromptTemplate(\n", + " [\n", + " (\n", + " \"system\",\n", + " \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n", + " ),\n", + " (\"human\", \"{input}\"),\n", + " ]\n", + ")\n", + "\n", + "chain = prompt | model\n", + "chain.invoke(\n", + " {\n", + " \"input_language\": \"English\",\n", + " \"output_language\": \"German\",\n", + " \"input\": \"I love programming.\",\n", + " }\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "d1ee55bc-ffc8-4cfa-801c-993953a08cfd", + "metadata": {}, + "source": [ + "## TODO: Any functionality specific to this model provider\n", + "\n", + "E.g. creating/using finetuned models via this provider. Delete if not relevant." + ] + }, + { + "cell_type": "markdown", + "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all Chat__ModuleName__ features and configurations head to the [API reference](https://python.langchain.com/api_reference/__package_name_short_snake__/chat_models/__module_name__.chat_models.Chat__ModuleName__.html)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } }, - { - "cell_type": "markdown", - "id": "e49f1e0d", - "metadata": {}, - "source": [ - "# Chat__ModuleName__\n", - "\n", - "- TODO: Make sure API reference link is correct.\n", - "\n", - "This will help you get started with __ModuleName__ [chat models](/docs/concepts/chat_models). For detailed documentation of all Chat__ModuleName__ features and configurations head to the [API reference](https://python.langchain.com/api_reference/__package_name_short_snake__/chat_models/__module_name__.chat_models.Chat__ModuleName__.html).\n", - "\n", - "- TODO: Add any other relevant links, like information about models, prices, context windows, etc. 
See https://python.langchain.com/docs/integrations/chat/openai/ for an example.\n", - "\n", - "## Overview\n", - "### Integration details\n", - "\n", - "- TODO: Fill in table features.\n", - "- TODO: Remove JS support link if not relevant, otherwise ensure link is correct.\n", - "- TODO: Make sure API reference links are correct.\n", - "\n", - "| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/docs/integrations/chat/__package_name_short_snake__) | Package downloads | Package latest |\n", - "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", - "| [Chat__ModuleName__](https://python.langchain.com/api_reference/__package_name_short_snake__/chat_models/__module_name__.chat_models.Chat__ModuleName__.html) | [__package_name__](https://python.langchain.com/api_reference/__package_name_short_snake__/) | βœ…/❌ | beta/❌ | βœ…/❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/__package_name__?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/__package_name__?style=flat-square&label=%20) |\n", - "\n", - "### Model features\n", - "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n", - "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n", - "| βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ | βœ…/❌ |\n", - "\n", - "## Setup\n", - "\n", - "- TODO: Update with relevant info.\n", - "\n", - "To access __ModuleName__ models you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name__` integration package.\n", - "\n", - "### Credentials\n", - "\n", - "- TODO: Update with relevant info.\n", - "\n", - "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key. 
Once you've done this set the __MODULE_NAME___API_KEY environment variable:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "433e8d2b-9519-4b49-b2c4-7ab65b046c94", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "if not os.getenv(\"__MODULE_NAME___API_KEY\"):\n", - " os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\n", - " \"Enter your __ModuleName__ API key: \"\n", - " )" - ] - }, - { - "cell_type": "markdown", - "id": "72ee0c4b-9764-423a-9dbf-95129e185210", - "metadata": {}, - "source": "To enable automated tracing of your model calls, set your [LangSmith](https://docs.smith.langchain.com/) API key:" - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a15d341e-3e26-4ca3-830b-5aab30ed66de", - "metadata": {}, - "outputs": [], - "source": [ - "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n", - "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")" - ] - }, - { - "cell_type": "markdown", - "id": "0730d6a1-c893-4840-9817-5e5251676d5d", - "metadata": {}, - "source": [ - "### Installation\n", - "\n", - "The LangChain __ModuleName__ integration lives in the `__package_name__` package:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "652d6238-1f87-422a-b135-f5abbb8652fc", - "metadata": {}, - "outputs": [], - "source": [ - "%pip install -qU __package_name__" - ] - }, - { - "cell_type": "markdown", - "id": "a38cde65-254d-4219-a441-068766c0d4b5", - "metadata": {}, - "source": [ - "## Instantiation\n", - "\n", - "Now we can instantiate our model object and generate chat completions:\n", - "\n", - "- TODO: Update model instantiation with relevant params." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae", - "metadata": {}, - "outputs": [], - "source": [ - "from __module_name__ import Chat__ModuleName__\n", - "\n", - "llm = Chat__ModuleName__(\n", - " model=\"model-name\",\n", - " temperature=0,\n", - " max_tokens=None,\n", - " timeout=None,\n", - " max_retries=2,\n", - " # other params...\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "2b4f3e15", - "metadata": {}, - "source": [ - "## Invocation\n", - "\n", - "- TODO: Run cells so output can be seen." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "62e0dbc3", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "messages = [\n", - " (\n", - " \"system\",\n", - " \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n", - " ),\n", - " (\"human\", \"I love programming.\"),\n", - "]\n", - "ai_msg = llm.invoke(messages)\n", - "ai_msg" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d86145b3-bfef-46e8-b227-4dda5c9c2705", - "metadata": {}, - "outputs": [], - "source": [ - "print(ai_msg.content)" - ] - }, - { - "cell_type": "markdown", - "id": "18e2bfc0-7e78-4528-a73f-499ac150dca8", - "metadata": {}, - "source": [ - "## Chaining\n", - "\n", - "We can [chain](/docs/how_to/sequence/) our model with a prompt template like so:\n", - "\n", - "- TODO: Run cells so output can be seen." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_core.prompts import ChatPromptTemplate\n", - "\n", - "prompt = ChatPromptTemplate(\n", - " [\n", - " (\n", - " \"system\",\n", - " \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n", - " ),\n", - " (\"human\", \"{input}\"),\n", - " ]\n", - ")\n", - "\n", - "chain = prompt | llm\n", - "chain.invoke(\n", - " {\n", - " \"input_language\": \"English\",\n", - " \"output_language\": \"German\",\n", - " \"input\": \"I love programming.\",\n", - " }\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "d1ee55bc-ffc8-4cfa-801c-993953a08cfd", - "metadata": {}, - "source": [ - "## TODO: Any functionality specific to this model provider\n", - "\n", - "E.g. creating/using finetuned models via this provider. Delete if not relevant." - ] - }, - { - "cell_type": "markdown", - "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3", - "metadata": {}, - "source": [ - "## API reference\n", - "\n", - "For detailed documentation of all Chat__ModuleName__ features and configurations head to the [API reference](https://python.langchain.com/api_reference/__package_name_short_snake__/chat_models/__module_name__.chat_models.Chat__ModuleName__.html)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.9" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/libs/cli/langchain_cli/integration_template/docs/llms.ipynb b/libs/cli/langchain_cli/integration_template/docs/llms.ipynb index 217929fff71..ffeff84e280 100644 --- a/libs/cli/langchain_cli/integration_template/docs/llms.ipynb +++ b/libs/cli/langchain_cli/integration_template/docs/llms.ipynb @@ -1,236 +1,238 @@ { - "cells": [ - { - "cell_type": "raw", - "id": "67db2992", - "metadata": {}, - "source": [ - "---\n", - "sidebar_label: __ModuleName__\n", - "---" - ] + "cells": [ + { + "cell_type": "raw", + "id": "67db2992", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: __ModuleName__\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "9597802c", + "metadata": {}, + "source": [ + "# __ModuleName__LLM\n", + "\n", + "- [ ] TODO: Make sure API reference link is correct\n", + "\n", + "This will help you get started with __ModuleName__ completion models (LLMs) using LangChain. 
For detailed documentation on `__ModuleName__LLM` features and configuration options, please refer to the [API reference](https://api.python.langchain.com/en/latest/llms/__module_name__.llms.__ModuleName__LLM.html).\n", + "\n", + "## Overview\n", + "### Integration details\n", + "\n", + "- TODO: Fill in table features.\n", + "- TODO: Remove JS support link if not relevant, otherwise ensure link is correct.\n", + "- TODO: Make sure API reference links are correct.\n", + "\n", + "| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/docs/integrations/llms/__package_name_short_snake__) | Package downloads | Package latest |\n", + "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", + "| [__ModuleName__LLM](https://api.python.langchain.com/en/latest/llms/__module_name__.llms.__ModuleName__LLM.html) | [__package_name__](https://api.python.langchain.com/en/latest/__package_name_short_snake___api_reference.html) | βœ…/❌ | beta/❌ | βœ…/❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/__package_name__&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/__package_name__&label=%20) |\n", + "\n", + "## Setup\n", + "\n", + "- TODO: Update with relevant info.\n", + "\n", + "To access __ModuleName__ models you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name__` integration package.\n", + "\n", + "### Credentials\n", + "\n", + "- TODO: Update with relevant info.\n", + "\n", + "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key. Once you've done this set the __MODULE_NAME___API_KEY environment variable:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bc51e756", + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if not os.getenv(\"__MODULE_NAME___API_KEY\"):\n", + " os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\n", + " \"Enter your __ModuleName__ API key: \"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "4b6e1ca6", + "metadata": {}, + "source": [ + "To enable automated tracing of your model calls, set your [LangSmith](https://docs.smith.langchain.com/) API key:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "196c2b41", + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n", + "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")" + ] + }, + { + "cell_type": "markdown", + "id": "809c6577", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "The LangChain __ModuleName__ integration lives in the `__package_name__` package:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "59c710c4", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU __package_name__" + ] + }, + { + "cell_type": "markdown", + "id": "0a760037", + "metadata": {}, + "source": [ + "## Instantiation\n", + "\n", + "Now we can instantiate our model object and generate chat completions:\n", + "\n", + "- TODO: Update model instantiation with relevant params." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0562a13", + "metadata": {}, + "outputs": [], + "source": [ + "from __module_name__ import __ModuleName__LLM\n", + "\n", + "model = __ModuleName__LLM(\n", + " model=\"model-name\",\n", + " temperature=0,\n", + " max_tokens=None,\n", + " timeout=None,\n", + " max_retries=2,\n", + " # other params...\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "0ee90032", + "metadata": {}, + "source": [ + "## Invocation\n", + "\n", + "- [ ] TODO: Run cells so output can be seen." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "035dea0f", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "input_text = \"__ModuleName__ is an AI company that \"\n", + "\n", + "completion = model.invoke(input_text)\n", + "completion" + ] + }, + { + "cell_type": "markdown", + "id": "add38532", + "metadata": {}, + "source": [ + "## Chaining\n", + "\n", + "We can [chain](/docs/how_to/sequence/) our completion model with a prompt template like so:\n", + "\n", + "- TODO: Run cells so output can be seen." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "078e9db2", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.prompts import PromptTemplate\n", + "\n", + "prompt = PromptTemplate(\"How to say {input} in {output_language}:\\n\")\n", + "\n", + "chain = prompt | model\n", + "chain.invoke(\n", + " {\n", + " \"output_language\": \"German\",\n", + " \"input\": \"I love programming.\",\n", + " }\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "e99eef30", + "metadata": {}, + "source": [ + "## TODO: Any functionality specific to this model provider\n", + "\n", + "E.g. creating/using finetuned models via this provider. Delete if not relevant" + ] + }, + { + "cell_type": "markdown", + "id": "e9bdfcef", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all `__ModuleName__LLM` features and configurations head to the API reference: https://api.python.langchain.com/en/latest/llms/__module_name__.llms.__ModuleName__LLM.html" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.11.1 64-bit", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + }, + "vscode": { + "interpreter": { + "hash": "e971737741ff4ec9aff7dc6155a1060a59a8a6d52c757dbbe66bf8ee389494b1" + } + } }, - { - "cell_type": "markdown", - "id": "9597802c", - "metadata": {}, - "source": [ - "# __ModuleName__LLM\n", - "\n", - "- [ ] TODO: Make sure API reference link is correct\n", - "\n", - "This will help you get started with __ModuleName__ completion models (LLMs) using LangChain. 
For detailed documentation on `__ModuleName__LLM` features and configuration options, please refer to the [API reference](https://api.python.langchain.com/en/latest/llms/__module_name__.llms.__ModuleName__LLM.html).\n", - "\n", - "## Overview\n", - "### Integration details\n", - "\n", - "- TODO: Fill in table features.\n", - "- TODO: Remove JS support link if not relevant, otherwise ensure link is correct.\n", - "- TODO: Make sure API reference links are correct.\n", - "\n", - "| Class | Package | Local | Serializable | [JS support](https://js.langchain.com/docs/integrations/llms/__package_name_short_snake__) | Package downloads | Package latest |\n", - "| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n", - "| [__ModuleName__LLM](https://api.python.langchain.com/en/latest/llms/__module_name__.llms.__ModuleName__LLM.html) | [__package_name__](https://api.python.langchain.com/en/latest/__package_name_short_snake___api_reference.html) | βœ…/❌ | beta/❌ | βœ…/❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/__package_name__?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/__package_name__?style=flat-square&label=%20) |\n", - "\n", - "## Setup\n", - "\n", - "- TODO: Update with relevant info.\n", - "\n", - "To access __ModuleName__ models you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name__` integration package.\n", - "\n", - "### Credentials\n", - "\n", - "- TODO: Update with relevant info.\n", - "\n", - "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key. Once you've done this set the __MODULE_NAME___API_KEY environment variable:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bc51e756", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "if not os.getenv(\"__MODULE_NAME___API_KEY\"):\n", - " os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\n", - " \"Enter your __ModuleName__ API key: \"\n", - " )" - ] - }, - { - "cell_type": "markdown", - "id": "4b6e1ca6", - "metadata": {}, - "source": "To enable automated tracing of your model calls, set your [LangSmith](https://docs.smith.langchain.com/) API key:" - }, - { - "cell_type": "code", - "execution_count": null, - "id": "196c2b41", - "metadata": {}, - "outputs": [], - "source": [ - "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n", - "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")" - ] - }, - { - "cell_type": "markdown", - "id": "809c6577", - "metadata": {}, - "source": [ - "### Installation\n", - "\n", - "The LangChain __ModuleName__ integration lives in the `__package_name__` package:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "59c710c4", - "metadata": {}, - "outputs": [], - "source": [ - "%pip install -qU __package_name__" - ] - }, - { - "cell_type": "markdown", - "id": "0a760037", - "metadata": {}, - "source": [ - "## Instantiation\n", - "\n", - "Now we can instantiate our model object and generate chat completions:\n", - "\n", - "- TODO: Update model instantiation with relevant params." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0562a13", - "metadata": {}, - "outputs": [], - "source": [ - "from __module_name__ import __ModuleName__LLM\n", - "\n", - "llm = __ModuleName__LLM(\n", - " model=\"model-name\",\n", - " temperature=0,\n", - " max_tokens=None,\n", - " timeout=None,\n", - " max_retries=2,\n", - " # other params...\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "0ee90032", - "metadata": {}, - "source": [ - "## Invocation\n", - "\n", - "- [ ] TODO: Run cells so output can be seen." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "035dea0f", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "input_text = \"__ModuleName__ is an AI company that \"\n", - "\n", - "completion = llm.invoke(input_text)\n", - "completion" - ] - }, - { - "cell_type": "markdown", - "id": "add38532", - "metadata": {}, - "source": [ - "## Chaining\n", - "\n", - "We can [chain](/docs/how_to/sequence/) our completion model with a prompt template like so:\n", - "\n", - "- TODO: Run cells so output can be seen." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "078e9db2", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_core.prompts import PromptTemplate\n", - "\n", - "prompt = PromptTemplate(\"How to say {input} in {output_language}:\\n\")\n", - "\n", - "chain = prompt | llm\n", - "chain.invoke(\n", - " {\n", - " \"output_language\": \"German\",\n", - " \"input\": \"I love programming.\",\n", - " }\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "e99eef30", - "metadata": {}, - "source": [ - "## TODO: Any functionality specific to this model provider\n", - "\n", - "E.g. creating/using finetuned models via this provider. Delete if not relevant" - ] - }, - { - "cell_type": "markdown", - "id": "e9bdfcef", - "metadata": {}, - "source": [ - "## API reference\n", - "\n", - "For detailed documentation of all `__ModuleName__LLM` features and configurations head to the API reference: https://api.python.langchain.com/en/latest/llms/__module_name__.llms.__ModuleName__LLM.html" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3.11.1 64-bit", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.7" - }, - "vscode": { - "interpreter": { - "hash": "e971737741ff4ec9aff7dc6155a1060a59a8a6d52c757dbbe66bf8ee389494b1" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/libs/cli/langchain_cli/integration_template/docs/retrievers.ipynb b/libs/cli/langchain_cli/integration_template/docs/retrievers.ipynb index 300d3d87958..254633bdf23 100644 --- a/libs/cli/langchain_cli/integration_template/docs/retrievers.ipynb +++ b/libs/cli/langchain_cli/integration_template/docs/retrievers.ipynb @@ -155,7 +155,7 @@ "\n", "from langchain_openai import ChatOpenAI\n", "\n", - "llm = ChatOpenAI(model=\"gpt-3.5-turbo-0125\", temperature=0)" + "model = ChatOpenAI(model=\"gpt-3.5-turbo-0125\", temperature=0)" ] }, { @@ -185,7 +185,7 @@ "chain = (\n", " {\"context\": retriever | format_docs, \"question\": RunnablePassthrough()}\n", " | prompt\n", - " | llm\n", + " | model\n", " | StrOutputParser()\n", ")" ] diff --git a/libs/cli/langchain_cli/integration_template/docs/stores.ipynb 
b/libs/cli/langchain_cli/integration_template/docs/stores.ipynb index 5daa0568c4a..e250dcfa627 100644 --- a/libs/cli/langchain_cli/integration_template/docs/stores.ipynb +++ b/libs/cli/langchain_cli/integration_template/docs/stores.ipynb @@ -1,204 +1,204 @@ { - "cells": [ - { - "cell_type": "raw", - "metadata": { - "vscode": { - "languageId": "raw" + "cells": [ + { + "cell_type": "raw", + "metadata": { + "vscode": { + "languageId": "raw" + } + }, + "source": [ + "---\n", + "sidebar_label: __ModuleName__ByteStore\n", + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# __ModuleName__ByteStore\n", + "\n", + "- TODO: Make sure API reference link is correct.\n", + "\n", + "This will help you get started with __ModuleName__ [key-value stores](/docs/concepts/#key-value-stores). For detailed documentation of all __ModuleName__ByteStore features and configurations head to the [API reference](https://python.langchain.com/v0.2/api_reference/core/stores/langchain_core.stores.__module_name__ByteStore.html).\n", + "\n", + "- TODO: Add any other relevant links, like information about models, prices, context windows, etc. See https://python.langchain.com/docs/integrations/stores/in_memory/ for an example.\n", + "\n", + "## Overview\n", + "\n", + "- TODO: (Optional) A short introduction to the underlying technology/API.\n", + "\n", + "### Integration details\n", + "\n", + "- TODO: Fill in table features.\n", + "- TODO: Remove JS support link if not relevant, otherwise ensure link is correct.\n", + "- TODO: Make sure API reference links are correct.\n", + "\n", + "| Class | Package | Local | [JS support](https://js.langchain.com/docs/integrations/stores/_package_name_) | Package downloads | Package latest |\n", + "| :--- | :--- | :---: | :---: | :---: | :---: |\n", + "| [__ModuleName__ByteStore](https://api.python.langchain.com/en/latest/stores/__module_name__.stores.__ModuleName__ByteStore.html) | [__package_name__](https://api.python.langchain.com/en/latest/__package_name_short_snake___api_reference.html) | βœ…/❌ | βœ…/❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/__package_name__?label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/__package_name__?label=%20) |\n", + "\n", + "## Setup\n", + "\n", + "- TODO: Update with relevant info.\n", + "\n", + "To create a __ModuleName__ byte store, you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name__` integration package.\n", + "\n", + "### Credentials\n", + "\n", + "- TODO: Update with relevant info, or omit if the service does not require any credentials.\n", + "\n", + "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key.
Once you've done this set the __MODULE_NAME___API_KEY environment variable:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "if not os.getenv(\"__MODULE_NAME___API_KEY\"):\n", + " os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\n", + " \"Enter your __ModuleName__ API key: \"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Installation\n", + "\n", + "The LangChain __ModuleName__ integration lives in the `__package_name__` package:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install -qU __package_name__" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Instantiation\n", + "\n", + "Now we can instantiate our byte store:\n", + "\n", + "- TODO: Update model instantiation with relevant params." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from __module_name__ import __ModuleName__ByteStore\n", + "\n", + "kv_store = __ModuleName__ByteStore(\n", + " # params...\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Usage\n", + "\n", + "- TODO: Run cells so output can be seen.\n", + "\n", + "You can set data under keys like this using the `mset` method:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "kv_store.mset(\n", + " [\n", + " [\"key1\", b\"value1\"],\n", + " [\"key2\", b\"value2\"],\n", + " ]\n", + ")\n", + "\n", + "kv_store.mget(\n", + " [\n", + " \"key1\",\n", + " \"key2\",\n", + " ]\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And you can delete data using the `mdelete` method:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "kv_store.mdelete(\n", + " [\n", + " \"key1\",\n", + " \"key2\",\n", + " ]\n", + ")\n", + "\n", + "kv_store.mget(\n", + " [\n", + " \"key1\",\n", + " \"key2\",\n", + " ]\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## TODO: Any functionality specific to this key-value store provider\n", + "\n", + "E.g. extra initialization. Delete if not relevant." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all __ModuleName__ByteStore features and configurations, head to the API reference: https://api.python.langchain.com/en/latest/stores/__module_name__.stores.__ModuleName__ByteStore.html" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.10.5" } - }, - "source": [ - "---\n", - "sidebar_label: __ModuleName__ByteStore\n", - "---" - ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# __ModuleName__ByteStore\n", - "\n", - "- TODO: Make sure API reference link is correct.\n", - "\n", - "This will help you get started with __ModuleName__ [key-value stores](/docs/concepts/#key-value-stores). 
For detailed documentation of all __ModuleName__ByteStore features and configurations head to the [API reference](https://python.langchain.com/v0.2/api_reference/core/stores/langchain_core.stores.__module_name__ByteStore.html).\n", - "\n", - "- TODO: Add any other relevant links, like information about models, prices, context windows, etc. See https://python.langchain.com/docs/integrations/stores/in_memory/ for an example.\n", - "\n", - "## Overview\n", - "\n", - "- TODO: (Optional) A short introduction to the underlying technology/API.\n", - "\n", - "### Integration details\n", - "\n", - "- TODO: Fill in table features.\n", - "- TODO: Remove JS support link if not relevant, otherwise ensure link is correct.\n", - "- TODO: Make sure API reference links are correct.\n", - "\n", - "| Class | Package | Local | [JS support](https://js.langchain.com/docs/integrations/stores/_package_name_) | Package downloads | Package latest |\n", - "| :--- | :--- | :---: | :---: | :---: | :---: |\n", - "| [__ModuleName__ByteStore](https://api.python.langchain.com/en/latest/stores/__module_name__.stores.__ModuleName__ByteStore.html) | [__package_name__](https://api.python.langchain.com/en/latest/__package_name_short_snake___api_reference.html) | βœ…/❌ | βœ…/❌ | ![PyPI - Downloads](https://img.shields.io/pypi/dm/__package_name__?style=flat-square&label=%20) | ![PyPI - Version](https://img.shields.io/pypi/v/__package_name__?style=flat-square&label=%20) |\n", - "\n", - "## Setup\n", - "\n", - "- TODO: Update with relevant info.\n", - "\n", - "To create a __ModuleName__ byte store, you'll need to create a/an __ModuleName__ account, get an API key, and install the `__package_name__` integration package.\n", - "\n", - "### Credentials\n", - "\n", - "- TODO: Update with relevant info, or omit if the service does not require any credentials.\n", - "\n", - "Head to (TODO: link) to sign up to __ModuleName__ and generate an API key. Once you've done this set the __MODULE_NAME___API_KEY environment variable:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "if not os.getenv(\"__MODULE_NAME___API_KEY\"):\n", - " os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\n", - " \"Enter your __ModuleName__ API key: \"\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Installation\n", - "\n", - "The LangChain __ModuleName__ integration lives in the `__package_name__` package:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%pip install -qU __package_name__" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Instantiation\n", - "\n", - "Now we can instantiate our byte store:\n", - "\n", - "- TODO: Update model instantiation with relevant params." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from __module_name__ import __ModuleName__ByteStore\n", - "\n", - "kv_store = __ModuleName__ByteStore(\n", - " # params...\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Usage\n", - "\n", - "- TODO: Run cells so output can be seen.\n", - "\n", - "You can set data under keys like this using the `mset` method:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "kv_store.mset(\n", - " [\n", - " [\"key1\", b\"value1\"],\n", - " [\"key2\", b\"value2\"],\n", - " ]\n", - ")\n", - "\n", - "kv_store.mget(\n", - " [\n", - " \"key1\",\n", - " \"key2\",\n", - " ]\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "And you can delete data using the `mdelete` method:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "kv_store.mdelete(\n", - " [\n", - " \"key1\",\n", - " \"key2\",\n", - " ]\n", - ")\n", - "\n", - "kv_store.mget(\n", - " [\n", - " \"key1\",\n", - " \"key2\",\n", - " ]\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## TODO: Any functionality specific to this key-value store provider\n", - "\n", - "E.g. extra initialization. Delete if not relevant." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## API reference\n", - "\n", - "For detailed documentation of all __ModuleName__ByteStore features and configurations, head to the API reference: https://api.python.langchain.com/en/latest/stores/__module_name__.stores.__ModuleName__ByteStore.html" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "name": "python", - "version": "3.10.5" - } - }, - "nbformat": 4, - "nbformat_minor": 2 + "nbformat": 4, + "nbformat_minor": 2 } diff --git a/libs/cli/langchain_cli/integration_template/docs/tools.ipynb b/libs/cli/langchain_cli/integration_template/docs/tools.ipynb index a160b95658a..0310a839850 100644 --- a/libs/cli/langchain_cli/integration_template/docs/tools.ipynb +++ b/libs/cli/langchain_cli/integration_template/docs/tools.ipynb @@ -1,271 +1,271 @@ { - "cells": [ - { - "cell_type": "raw", - "id": "10238e62-3465-4973-9279-606cbb7ccf16", - "metadata": {}, - "source": [ - "---\n", - "sidebar_label: __ModuleName__\n", - "---" - ] + "cells": [ + { + "cell_type": "raw", + "id": "10238e62-3465-4973-9279-606cbb7ccf16", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: __ModuleName__\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "a6f91f20", + "metadata": {}, + "source": [ + "# __ModuleName__\n", + "\n", + "- TODO: Make sure API reference link is correct.\n", + "\n", + "This notebook provides a quick overview for getting started with __ModuleName__ [tool](/docs/integrations/tools/). 
For detailed documentation of all __ModuleName__ features and configurations head to the [API reference](https://python.langchain.com/v0.2/api_reference/community/tools/langchain_community.tools.__module_name__.tool.__ModuleName__.html).\n", + "\n", + "- TODO: Add any other relevant links, like information about underlying API, etc.\n", + "\n", + "## Overview\n", + "\n", + "### Integration details\n", + "\n", + "- TODO: Make sure links and features are correct\n", + "\n", + "| Class | Package | Serializable | [JS support](https://js.langchain.com/docs/integrations/tools/__module_name__) | Package latest |\n", + "| :--- | :--- | :---: | :---: | :---: |\n", + "| [__ModuleName__](https://python.langchain.com/v0.2/api_reference/community/tools/langchain_community.tools.__module_name__.tool.__ModuleName__.html) | [langchain-community](https://api.python.langchain.com/en/latest/community_api_reference.html) | beta/❌ | βœ…/❌ | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-community?label=%20) |\n", + "\n", + "### Tool features\n", + "\n", + "- TODO: Add feature table if it makes sense\n", + "\n", + "\n", + "## Setup\n", + "\n", + "- TODO: Add any additional deps\n", + "\n", + "The integration lives in the `langchain-community` package." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f85b4089", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install --quiet -U langchain-community" + ] + }, + { + "cell_type": "markdown", + "id": "b15e9266", + "metadata": {}, + "source": [ + "### Credentials\n", + "\n", + "- TODO: Add any credentials that are needed" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "e0b178a2-8816-40ca-b57c-ccdd86dde9c9", + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "# if not os.environ.get(\"__MODULE_NAME___API_KEY\"):\n", + "# os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\"__MODULE_NAME__ API key:\\n\")" + ] + }, + { + "cell_type": "markdown", + "id": "bc5ab717-fd27-4c59-b912-bdd099541478", + "metadata": {}, + "source": [ + "It's also helpful (but not needed) to set up [LangSmith](https://smith.langchain.com/) for best-in-class observability:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "a6c2f136-6367-4f1f-825d-ae741e1bf281", + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n", + "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass()" + ] + }, + { + "cell_type": "markdown", + "id": "1c97218f-f366-479d-8bf7-fe9f2f6df73f", + "metadata": {}, + "source": [ + "## Instantiation\n", + "\n", + "- TODO: Fill in instantiation params\n", + "\n", + "Here we show how to instantiate an instance of the __ModuleName__ tool, with " + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "8b3ddfe9-ca79-494c-a7ab-1f56d9407a64", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.tools import __ModuleName__\n", + "\n", + "\n", + "tool = __ModuleName__(...)" + ] + }, + { + "cell_type": "markdown", + "id": "74147a1a", + "metadata": {}, + "source": [ + "## Invocation\n", + "\n", + "### [Invoke directly with args](/docs/concepts/tools/#use-the-tool-directly)\n", + "\n", + "- TODO: Describe what the tool args are, fill them in, run cell" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "65310a8b-eb0c-4d9e-a618-4f4abe2414fc", + "metadata": {}, + "outputs": [], + "source": [ + "tool.invoke({...})" + ] + }, + { + "cell_type": "markdown", + "id": 
"d6e73897", + "metadata": {}, + "source": [ + "### [Invoke with ToolCall](/docs/concepts/tool_calling/#tool-execution)\n", + "\n", + "We can also invoke the tool with a model-generated ToolCall, in which case a ToolMessage will be returned:\n", + "\n", + "- TODO: Fill in tool args and run cell" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f90e33a7", + "metadata": {}, + "outputs": [], + "source": [ + "# This is usually generated by a model, but we'll create a tool call directly for demo purposes.\n", + "model_generated_tool_call = {\n", + " \"args\": {...}, # TODO: FILL IN\n", + " \"id\": \"1\",\n", + " \"name\": tool.name,\n", + " \"type\": \"tool_call\",\n", + "}\n", + "tool.invoke(model_generated_tool_call)" + ] + }, + { + "cell_type": "markdown", + "id": "659f9fbd-6fcf-445f-aa8c-72d8e60154bd", + "metadata": {}, + "source": [ + "## Use within an agent\n", + "\n", + "- TODO: Add user question and run cells\n", + "\n", + "We can use our tool in an [agent](/docs/concepts/agents/). For this we will need a LLM with [tool-calling](/docs/how_to/tool_calling/) capabilities:\n", + "\n", + "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "af3123ad-7a02-40e5-b58e-7d56e23e5830", + "metadata": {}, + "outputs": [], + "source": [ + "# | output: false\n", + "# | echo: false\n", + "\n", + "# !pip install -qU langchain langchain-openai\n", + "from langchain.chat_models import init_chat_model\n", + "\n", + "model = init_chat_model(model=\"gpt-4o\", model_provider=\"openai\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bea35fa1", + "metadata": {}, + "outputs": [], + "source": [ + "from langgraph.prebuilt import create_react_agent\n", + "\n", + "tools = [tool]\n", + "agent = create_react_agent(model, tools)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fdbf35b5-3aaf-4947-9ec6-48c21533fb95", + "metadata": {}, + "outputs": [], + "source": [ + "example_query = \"...\"\n", + "\n", + "events = agent.stream(\n", + " {\"messages\": [(\"user\", example_query)]},\n", + " stream_mode=\"values\",\n", + ")\n", + "for event in events:\n", + " event[\"messages\"][-1].pretty_print()" + ] + }, + { + "cell_type": "markdown", + "id": "4ac8146c", + "metadata": {}, + "source": [ + "## API reference\n", + "\n", + "For detailed documentation of all __ModuleName__ features and configurations head to the API reference: https://python.langchain.com/v0.2/api_reference/community/tools/langchain_community.tools.__module_name__.tool.__ModuleName__.html" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "poetry-venv-311", + "language": "python", + "name": "poetry-venv-311" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } }, - { - "cell_type": "markdown", - "id": "a6f91f20", - "metadata": {}, - "source": [ - "# __ModuleName__\n", - "\n", - "- TODO: Make sure API reference link is correct.\n", - "\n", - "This notebook provides a quick overview for getting started with __ModuleName__ [tool](/docs/integrations/tools/). 
For detailed documentation of all __ModuleName__ features and configurations head to the [API reference](https://python.langchain.com/v0.2/api_reference/community/tools/langchain_community.tools.__module_name__.tool.__ModuleName__.html).\n", - "\n", - "- TODO: Add any other relevant links, like information about underlying API, etc.\n", - "\n", - "## Overview\n", - "\n", - "### Integration details\n", - "\n", - "- TODO: Make sure links and features are correct\n", - "\n", - "| Class | Package | Serializable | [JS support](https://js.langchain.com/docs/integrations/tools/__module_name__) | Package latest |\n", - "| :--- | :--- | :---: | :---: | :---: |\n", - "| [__ModuleName__](https://python.langchain.com/v0.2/api_reference/community/tools/langchain_community.tools.__module_name__.tool.__ModuleName__.html) | [langchain-community](https://api.python.langchain.com/en/latest/community_api_reference.html) | beta/❌ | βœ…/❌ | ![PyPI - Version](https://img.shields.io/pypi/v/langchain-community?style=flat-square&label=%20) |\n", - "\n", - "### Tool features\n", - "\n", - "- TODO: Add feature table if it makes sense\n", - "\n", - "\n", - "## Setup\n", - "\n", - "- TODO: Add any additional deps\n", - "\n", - "The integration lives in the `langchain-community` package." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f85b4089", - "metadata": {}, - "outputs": [], - "source": [ - "%pip install --quiet -U langchain-community" - ] - }, - { - "cell_type": "markdown", - "id": "b15e9266", - "metadata": {}, - "source": [ - "### Credentials\n", - "\n", - "- TODO: Add any credentials that are needed" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "e0b178a2-8816-40ca-b57c-ccdd86dde9c9", - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "import os\n", - "\n", - "# if not os.environ.get(\"__MODULE_NAME___API_KEY\"):\n", - "# os.environ[\"__MODULE_NAME___API_KEY\"] = getpass.getpass(\"__MODULE_NAME__ API key:\\n\")" - ] - }, - { - "cell_type": "markdown", - "id": "bc5ab717-fd27-4c59-b912-bdd099541478", - "metadata": {}, - "source": [ - "It's also helpful (but not needed) to set up [LangSmith](https://smith.langchain.com/) for best-in-class observability:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "a6c2f136-6367-4f1f-825d-ae741e1bf281", - "metadata": {}, - "outputs": [], - "source": [ - "# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n", - "# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass()" - ] - }, - { - "cell_type": "markdown", - "id": "1c97218f-f366-479d-8bf7-fe9f2f6df73f", - "metadata": {}, - "source": [ - "## Instantiation\n", - "\n", - "- TODO: Fill in instantiation params\n", - "\n", - "Here we show how to instantiate an instance of the __ModuleName__ tool, with " - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "8b3ddfe9-ca79-494c-a7ab-1f56d9407a64", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_community.tools import __ModuleName__\n", - "\n", - "\n", - "tool = __ModuleName__(...)" - ] - }, - { - "cell_type": "markdown", - "id": "74147a1a", - "metadata": {}, - "source": [ - "## Invocation\n", - "\n", - "### [Invoke directly with args](/docs/concepts/tools/#use-the-tool-directly)\n", - "\n", - "- TODO: Describe what the tool args are, fill them in, run cell" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "65310a8b-eb0c-4d9e-a618-4f4abe2414fc", - "metadata": {}, - "outputs": [], - "source": [ - "tool.invoke({...})" - ] - }, - { - "cell_type": 
"markdown", - "id": "d6e73897", - "metadata": {}, - "source": [ - "### [Invoke with ToolCall](/docs/concepts/tool_calling/#tool-execution)\n", - "\n", - "We can also invoke the tool with a model-generated ToolCall, in which case a ToolMessage will be returned:\n", - "\n", - "- TODO: Fill in tool args and run cell" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f90e33a7", - "metadata": {}, - "outputs": [], - "source": [ - "# This is usually generated by a model, but we'll create a tool call directly for demo purposes.\n", - "model_generated_tool_call = {\n", - " \"args\": {...}, # TODO: FILL IN\n", - " \"id\": \"1\",\n", - " \"name\": tool.name,\n", - " \"type\": \"tool_call\",\n", - "}\n", - "tool.invoke(model_generated_tool_call)" - ] - }, - { - "cell_type": "markdown", - "id": "659f9fbd-6fcf-445f-aa8c-72d8e60154bd", - "metadata": {}, - "source": [ - "## Use within an agent\n", - "\n", - "- TODO: Add user question and run cells\n", - "\n", - "We can use our tool in an [agent](/docs/concepts/agents/). For this we will need a LLM with [tool-calling](/docs/how_to/tool_calling/) capabilities:\n", - "\n", - "import ChatModelTabs from \"@theme/ChatModelTabs\";\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "af3123ad-7a02-40e5-b58e-7d56e23e5830", - "metadata": {}, - "outputs": [], - "source": [ - "# | output: false\n", - "# | echo: false\n", - "\n", - "# !pip install -qU langchain langchain-openai\n", - "from langchain.chat_models import init_chat_model\n", - "\n", - "llm = init_chat_model(model=\"gpt-4o\", model_provider=\"openai\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bea35fa1", - "metadata": {}, - "outputs": [], - "source": [ - "from langgraph.prebuilt import create_react_agent\n", - "\n", - "tools = [tool]\n", - "agent = create_react_agent(llm, tools)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "fdbf35b5-3aaf-4947-9ec6-48c21533fb95", - "metadata": {}, - "outputs": [], - "source": [ - "example_query = \"...\"\n", - "\n", - "events = agent.stream(\n", - " {\"messages\": [(\"user\", example_query)]},\n", - " stream_mode=\"values\",\n", - ")\n", - "for event in events:\n", - " event[\"messages\"][-1].pretty_print()" - ] - }, - { - "cell_type": "markdown", - "id": "4ac8146c", - "metadata": {}, - "source": [ - "## API reference\n", - "\n", - "For detailed documentation of all __ModuleName__ features and configurations head to the API reference: https://python.langchain.com/v0.2/api_reference/community/tools/langchain_community.tools.__module_name__.tool.__ModuleName__.html" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "poetry-venv-311", - "language": "python", - "name": "poetry-venv-311" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.9" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/libs/cli/langchain_cli/integration_template/docs/vectorstores.ipynb b/libs/cli/langchain_cli/integration_template/docs/vectorstores.ipynb index 7f7f58b15c9..c77e495ad72 100644 --- a/libs/cli/langchain_cli/integration_template/docs/vectorstores.ipynb +++ b/libs/cli/langchain_cli/integration_template/docs/vectorstores.ipynb @@ -295,7 +295,7 @@ "source": [ "## TODO: Any functionality specific to this vector store\n", 
"\n", - "E.g. creating a persisten database to save to your disk, etc." + "E.g. creating a persistent database to save to your disk, etc." ] }, { diff --git a/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py b/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py index 0e077cd4856..740da85137e 100644 --- a/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py +++ b/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py @@ -36,20 +36,20 @@ class Chat__ModuleName__(BaseChatModel): # TODO: Populate with relevant params. Key init args β€” completion params: - model: str + model: Name of __ModuleName__ model to use. - temperature: float + temperature: Sampling temperature. - max_tokens: int | None + max_tokens: Max number of tokens to generate. # TODO: Populate with relevant params. Key init args β€” client params: - timeout: float | None + timeout: Timeout for requests. - max_retries: int + max_retries: Max number of retries. - api_key: str | None + api_key: __ModuleName__ API key. If not passed in will be read from env var __MODULE_NAME___API_KEY. @@ -60,7 +60,7 @@ class Chat__ModuleName__(BaseChatModel): ```python from __module_name__ import Chat__ModuleName__ - llm = Chat__ModuleName__( + model = Chat__ModuleName__( model="...", temperature=0, max_tokens=None, @@ -77,7 +77,7 @@ class Chat__ModuleName__(BaseChatModel): ("system", "You are a helpful translator. Translate the user sentence to French."), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` ```python @@ -87,7 +87,7 @@ class Chat__ModuleName__(BaseChatModel): # TODO: Delete if token-level streaming isn't supported. Stream: ```python - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.text, end="") ``` @@ -96,7 +96,7 @@ class Chat__ModuleName__(BaseChatModel): ``` ```python - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -110,13 +110,13 @@ class Chat__ModuleName__(BaseChatModel): # TODO: Delete if native async isn't supported. Async: ```python - await llm.ainvoke(messages) + await model.ainvoke(messages) # stream: - # async for chunk in (await llm.astream(messages)) + # async for chunk in (await model.astream(messages)) # batch: - # await llm.abatch([messages]) + # await model.abatch([messages]) ``` ```python @@ -137,8 +137,8 @@ class Chat__ModuleName__(BaseChatModel): location: str = Field(..., description="The city and state, e.g. San Francisco, CA") - llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) - ai_msg = llm_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?") + model_with_tools = model.bind_tools([GetWeather, GetPopulation]) + ai_msg = model_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?") ai_msg.tool_calls ``` @@ -162,8 +162,8 @@ class Chat__ModuleName__(BaseChatModel): punchline: str = Field(description="The punchline to the joke") rating: int | None = Field(description="How funny the joke is, from 1 to 10") - structured_llm = llm.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats") + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats") ``` ```python @@ -176,8 +176,8 @@ class Chat__ModuleName__(BaseChatModel): JSON mode: ```python # TODO: Replace with appropriate bind arg. 
- json_llm = llm.bind(response_format={"type": "json_object"}) - ai_msg = json_llm.invoke("Return a JSON object with key 'random_ints' and a value of 10 random ints in [0-99]") + json_model = model.bind(response_format={"type": "json_object"}) + ai_msg = json_model.invoke("Return a JSON object with key 'random_ints' and a value of 10 random ints in [0-99]") ai_msg.content ``` @@ -204,7 +204,7 @@ class Chat__ModuleName__(BaseChatModel): }, ], ) - ai_msg = llm.invoke([message]) + ai_msg = model.invoke([message]) ai_msg.content ``` @@ -235,7 +235,7 @@ class Chat__ModuleName__(BaseChatModel): # TODO: Delete if token usage metadata isn't supported. Token usage: ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.usage_metadata ``` @@ -247,8 +247,8 @@ class Chat__ModuleName__(BaseChatModel): Logprobs: ```python # TODO: Replace with appropriate bind arg. - logprobs_llm = llm.bind(logprobs=True) - ai_msg = logprobs_llm.invoke(messages) + logprobs_model = model.bind(logprobs=True) + ai_msg = logprobs_model.invoke(messages) ai_msg.response_metadata["logprobs"] ``` @@ -257,7 +257,7 @@ class Chat__ModuleName__(BaseChatModel): ``` Response metadata ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.response_metadata ``` diff --git a/libs/cli/langchain_cli/integration_template/integration_template/retrievers.py b/libs/cli/langchain_cli/integration_template/integration_template/retrievers.py index 48c5f735788..d4c6a966bfc 100644 --- a/libs/cli/langchain_cli/integration_template/integration_template/retrievers.py +++ b/libs/cli/langchain_cli/integration_template/integration_template/retrievers.py @@ -65,7 +65,7 @@ class __ModuleName__Retriever(BaseRetriever): Question: {question}\"\"\" ) - llm = ChatOpenAI(model="gpt-3.5-turbo-0125") + model = ChatOpenAI(model="gpt-3.5-turbo-0125") def format_docs(docs): return "\\n\\n".join(doc.page_content for doc in docs) @@ -73,7 +73,7 @@ class __ModuleName__Retriever(BaseRetriever): chain = ( {"context": retriever | format_docs, "question": RunnablePassthrough()} | prompt - | llm + | model | StrOutputParser() ) diff --git a/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py b/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py index 9969932f8da..48dc5980102 100644 --- a/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py +++ b/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py @@ -37,16 +37,16 @@ class __ModuleName__VectorStore(VectorStore): # TODO: Populate with relevant params. Key init args β€” indexing params: - collection_name: str + collection_name: Name of the collection. - embedding_function: Embeddings + embedding_function: Embedding function to use. # TODO: Populate with relevant params. Key init args β€” client params: - client: Client | None + client: Client to use. - connection_args: dict | None + connection_args: Connection arguments. # TODO: Replace with relevant init params. diff --git a/libs/cli/langchain_cli/namespaces/migrate/generate/utils.py b/libs/cli/langchain_cli/namespaces/migrate/generate/utils.py index e62f4b07db9..688276617c8 100644 --- a/libs/cli/langchain_cli/namespaces/migrate/generate/utils.py +++ b/libs/cli/langchain_cli/namespaces/migrate/generate/utils.py @@ -65,7 +65,7 @@ def is_subclass(class_obj: type, classes_: list[type]) -> bool: classes_: A list of classes to check against. 
Returns: - True if `class_obj` is a subclass of any class in `classes_`, False otherwise. + True if `class_obj` is a subclass of any class in `classes_`, `False` otherwise. """ return any( issubclass(class_obj, kls) diff --git a/libs/cli/langchain_cli/utils/git.py b/libs/cli/langchain_cli/utils/git.py index 36d99f55354..cf06b67ee6b 100644 --- a/libs/cli/langchain_cli/utils/git.py +++ b/libs/cli/langchain_cli/utils/git.py @@ -182,7 +182,7 @@ def parse_dependencies( inner_branches = _list_arg_to_length(branch, num_deps) return list( - map( # type: ignore[call-overload] + map( # type: ignore[call-overload, unused-ignore] parse_dependency_string, inner_deps, inner_repos, diff --git a/libs/cli/pyproject.toml b/libs/cli/pyproject.toml index 5c4a051336d..949756eac58 100644 --- a/libs/cli/pyproject.toml +++ b/libs/cli/pyproject.toml @@ -20,12 +20,13 @@ description = "CLI for interacting with LangChain" readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/cli" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-cli%3D%3D1%22" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/" +Documentation = "https://docs.langchain.com/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/cli" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-cli%3D%3D1%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [project.scripts] langchain = "langchain_cli.cli:app" @@ -42,14 +43,14 @@ lint = [ ] test = [ "langchain-core", - "langchain" + "langchain-classic" ] -typing = ["langchain"] +typing = ["langchain-classic"] test_integration = [] [tool.uv.sources] langchain-core = { path = "../core", editable = true } -langchain = { path = "../langchain", editable = true } +langchain-classic = { path = "../langchain", editable = true } [tool.ruff.format] docstring-code-format = true diff --git a/libs/cli/tests/unit_tests/migrate/generate/test_langchain_migration.py b/libs/cli/tests/unit_tests/migrate/generate/test_langchain_migration.py index 290a095609c..db243b5d470 100644 --- a/libs/cli/tests/unit_tests/migrate/generate/test_langchain_migration.py +++ b/libs/cli/tests/unit_tests/migrate/generate/test_langchain_migration.py @@ -1,5 +1,5 @@ import pytest -from langchain._api import suppress_langchain_deprecation_warning as sup2 +from langchain_classic._api import suppress_langchain_deprecation_warning as sup2 from langchain_core._api import suppress_langchain_deprecation_warning as sup1 from langchain_cli.namespaces.migrate.generate.generic import ( diff --git a/libs/cli/uv.lock b/libs/cli/uv.lock index b39b6a359ca..661cfb2defc 100644 --- a/libs/cli/uv.lock +++ b/libs/cli/uv.lock @@ -327,7 +327,21 @@ wheels = [ [[package]] name = "langchain" -version = "0.3.27" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph" }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/b8/36078257ba52351608129ee983079a4d77ee69eb1470ee248cd8f5728a31/langchain-1.0.0.tar.gz", hash = "sha256:56bf90d935ac1dda864519372d195ca58757b755dd4c44b87840b67d069085b7", size = 466932, upload-time = "2025-10-17T20:53:20.319Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c4/4d/2758a16ad01716c0fb3fe9ec205fd530eae4528b35a27ff44837c399e032/langchain-1.0.0-py3-none-any.whl", hash = "sha256:8c95e41250fc86d09a978fbdf999f86c18d50a28a2addc5da88546af00a1ad15", size = 106202, upload-time = "2025-10-17T20:53:18.685Z" }, +] + +[[package]] +name = "langchain-classic" +version = "1.0.0" source = { editable = "../langchain" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11'" }, @@ -344,20 +358,28 @@ dependencies = [ requires-dist = [ { name = "async-timeout", marker = "python_full_version < '3.11'", specifier = ">=4.0.0,<5.0.0" }, { name = "langchain-anthropic", marker = "extra == 'anthropic'" }, - { name = "langchain-community", marker = "extra == 'community'" }, + { name = "langchain-aws", marker = "extra == 'aws'" }, { name = "langchain-core", editable = "../core" }, + { name = "langchain-deepseek", marker = "extra == 'deepseek'" }, + { name = "langchain-fireworks", marker = "extra == 'fireworks'" }, { name = "langchain-google-genai", marker = "extra == 'google-genai'" }, { name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" }, + { name = "langchain-groq", marker = "extra == 'groq'" }, + { name = "langchain-huggingface", marker = "extra == 'huggingface'" }, + { name = "langchain-mistralai", marker = "extra == 'mistralai'" }, + { name = "langchain-ollama", marker = "extra == 'ollama'" }, { name = "langchain-openai", marker = "extra == 'openai'", editable = "../partners/openai" }, + { name = "langchain-perplexity", marker = "extra == 'perplexity'" }, { name = "langchain-text-splitters", editable = "../text-splitters" }, { name = "langchain-together", marker = "extra == 'together'" }, + { name = "langchain-xai", marker = "extra == 'xai'" }, { name = "langsmith", specifier = ">=0.1.17,<1.0.0" }, { name = "pydantic", specifier = ">=2.7.4,<3.0.0" }, { name = "pyyaml", specifier = ">=5.3.0,<7.0.0" }, { name = "requests", specifier = ">=2.0.0,<3.0.0" }, { name = "sqlalchemy", specifier = ">=1.4.0,<3.0.0" }, ] -provides-extras = ["community", "anthropic", "openai", "google-vertexai", "google-genai", "together"] +provides-extras = ["anthropic", "openai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"] [package.metadata.requires-dev] dev = [ @@ -376,7 +398,6 @@ test = [ { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" }, { name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" }, { name = "cffi", marker = "python_full_version >= '3.10'" }, - { name = "duckdb-engine", specifier = ">=0.9.2,<1.0.0" }, { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, { name = "langchain-core", editable = "../core" }, { name = "langchain-openai", editable = "../partners/openai" }, @@ -411,9 +432,10 @@ test-integration = [ { name = "wrapt", specifier = ">=1.15.0,<2.0.0" }, ] typing = [ + { name = "fastapi", specifier = ">=0.116.1,<1.0.0" }, { name = "langchain-core", editable = "../core" }, { name = "langchain-text-splitters", editable = "../text-splitters" }, - { name = "mypy", specifier = ">=1.15.0,<1.16.0" }, + { name = "mypy", specifier = ">=1.18.2,<1.19.0" }, { name = "mypy-protobuf", specifier = ">=3.0.0,<4.0.0" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, @@ -448,11 +470,11 @@ lint = [ { name = "ruff" }, ] test = [ - { name = "langchain" }, + { 
name = "langchain-classic" }, { name = "langchain-core" }, ] typing = [ - { name = "langchain" }, + { name = "langchain-classic" }, ] [package.metadata] @@ -475,15 +497,15 @@ lint = [ { name = "ruff", specifier = ">=0.13.1,<0.14" }, ] test = [ - { name = "langchain", editable = "../langchain" }, + { name = "langchain-classic", editable = "../langchain" }, { name = "langchain-core", editable = "../core" }, ] test-integration = [] -typing = [{ name = "langchain", editable = "../langchain" }] +typing = [{ name = "langchain-classic", editable = "../langchain" }] [[package]] name = "langchain-core" -version = "1.0.0a6" +version = "1.0.0" source = { editable = "../core" } dependencies = [ { name = "jsonpatch" }, @@ -541,7 +563,7 @@ typing = [ [[package]] name = "langchain-text-splitters" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "../text-splitters" } dependencies = [ { name = "langchain-core" }, @@ -574,8 +596,8 @@ test-integration = [ { name = "nltk", specifier = ">=3.9.1,<4.0.0" }, { name = "scipy", marker = "python_full_version == '3.12.*'", specifier = ">=1.7.0,<2.0.0" }, { name = "scipy", marker = "python_full_version >= '3.13'", specifier = ">=1.14.1,<2.0.0" }, - { name = "sentence-transformers", specifier = ">=3.0.1,<4.0.0" }, - { name = "spacy", specifier = ">=3.8.7,<4.0.0" }, + { name = "sentence-transformers", marker = "python_full_version < '3.14'", specifier = ">=3.0.1,<4.0.0" }, + { name = "spacy", marker = "python_full_version < '3.14'", specifier = ">=3.8.7,<4.0.0" }, { name = "thinc", specifier = ">=8.3.6,<9.0.0" }, { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" }, { name = "transformers", specifier = ">=4.51.3,<5.0.0" }, @@ -588,6 +610,62 @@ typing = [ { name = "types-requests", specifier = ">=2.31.0.20240218,<3.0.0.0" }, ] +[[package]] +name = "langgraph" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, + { name = "langgraph-prebuilt" }, + { name = "langgraph-sdk" }, + { name = "pydantic" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/f7/7ae10f1832ab1a6a402f451e54d6dab277e28e7d4e4204e070c7897ca71c/langgraph-1.0.0.tar.gz", hash = "sha256:5f83ed0e9bbcc37635bc49cbc9b3d9306605fa07504f955b7a871ed715f9964c", size = 472835, upload-time = "2025-10-17T20:23:38.263Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/42/6f6d0fe4eb661b06da8e6c59e58044e9e4221fdbffdcacae864557de961e/langgraph-1.0.0-py3-none-any.whl", hash = "sha256:4d478781832a1bc67e06c3eb571412ec47d7c57a5467d1f3775adf0e9dd4042c", size = 155416, upload-time = "2025-10-17T20:23:36.978Z" }, +] + +[[package]] +name = "langgraph-checkpoint" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "ormsgpack" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/83/6404f6ed23a91d7bc63d7df902d144548434237d017820ceaa8d014035f2/langgraph_checkpoint-2.1.2.tar.gz", hash = "sha256:112e9d067a6eff8937caf198421b1ffba8d9207193f14ac6f89930c1260c06f9", size = 142420, upload-time = "2025-10-07T17:45:17.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/f2/06bf5addf8ee664291e1b9ffa1f28fc9d97e59806dc7de5aea9844cbf335/langgraph_checkpoint-2.1.2-py3-none-any.whl", hash = "sha256:911ebffb069fd01775d4b5184c04aaafc2962fcdf50cf49d524cd4367c4d0c60", size = 45763, upload-time = "2025-10-07T17:45:16.19Z" }, +] + +[[package]] +name = "langgraph-prebuilt" 
+version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/2d/934b1129e217216a0dfaf0f7df0a10cedf2dfafe6cc8e1ee238cafaaa4a7/langgraph_prebuilt-1.0.0.tar.gz", hash = "sha256:eb75dad9aca0137451ca0395aa8541a665b3f60979480b0431d626fd195dcda2", size = 119927, upload-time = "2025-10-17T20:15:21.429Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/2e/ffa698eedc4c355168a9207ee598b2cc74ede92ce2b55c3469ea06978b6e/langgraph_prebuilt-1.0.0-py3-none-any.whl", hash = "sha256:ceaae4c5cee8c1f9b6468f76c114cafebb748aed0c93483b7c450e5a89de9c61", size = 28455, upload-time = "2025-10-17T20:15:20.043Z" }, +] + +[[package]] +name = "langgraph-sdk" +version = "0.2.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/d8/40e01190a73c564a4744e29a6c902f78d34d43dad9b652a363a92a67059c/langgraph_sdk-0.2.9.tar.gz", hash = "sha256:b3bd04c6be4fa382996cd2be8fbc1e7cc94857d2bc6b6f4599a7f2a245975303", size = 99802, upload-time = "2025-09-20T18:49:14.734Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/05/b2d34e16638241e6f27a6946d28160d4b8b641383787646d41a3727e0896/langgraph_sdk-0.2.9-py3-none-any.whl", hash = "sha256:fbf302edadbf0fb343596f91c597794e936ef68eebc0d3e1d358b6f9f72a1429", size = 56752, upload-time = "2025-09-20T18:49:13.346Z" }, +] + [[package]] name = "langserve" version = "0.0.51" @@ -780,6 +858,61 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/28/01/d6b274a0635be0468d4dbd9cafe80c47105937a0d42434e805e67cd2ed8b/orjson-3.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:e8f6a7a27d7b7bec81bd5924163e9af03d49bbb63013f107b48eb5d16db711bc", size = 125985, upload-time = "2025-08-26T17:46:16.67Z" }, ] +[[package]] +name = "ormsgpack" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/f8/224c342c0e03e131aaa1a1f19aa2244e167001783a433f4eed10eedd834b/ormsgpack-1.11.0.tar.gz", hash = "sha256:7c9988e78fedba3292541eb3bb274fa63044ef4da2ddb47259ea70c05dee4206", size = 49357, upload-time = "2025-10-08T17:29:15.621Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/3d/6996193cb2babc47fc92456223bef7d141065357ad4204eccf313f47a7b3/ormsgpack-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:03d4e658dd6e1882a552ce1d13cc7b49157414e7d56a4091fbe7823225b08cba", size = 367965, upload-time = "2025-10-08T17:28:06.736Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/c83b805dd9caebb046f4ceeed3706d0902ed2dbbcf08b8464e89f2c52e05/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67eb913c2b703f0ed39607fc56e50724dd41f92ce080a586b4d6149eb3fe4", size = 195209, upload-time = "2025-10-08T17:28:08.395Z" }, + { url = "https://files.pythonhosted.org/packages/3a/17/427d9c4f77b120f0af01d7a71d8144771c9388c2a81f712048320e31353b/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e54175b92411f73a238e5653a998627f6660de3def37d9dd7213e0fd264ca56", size = 205868, upload-time = "2025-10-08T17:28:09.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/32/a9ce218478bdbf3fee954159900e24b314ab3064f7b6a217ccb1e3464324/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca2b197f4556e1823d1319869d4c5dc278be335286d2308b0ed88b59a5afcc25", size = 207391, upload-time = "2025-10-08T17:28:11.031Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d3/4413fe7454711596fdf08adabdfa686580e4656702015108e4975f00a022/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc62388262f58c792fe1e450e1d9dbcc174ed2fb0b43db1675dd7c5ff2319d6a", size = 377078, upload-time = "2025-10-08T17:28:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ad/13fae555a45e35ca1ca929a27c9ee0a3ecada931b9d44454658c543f9b9c/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c48bc10af74adfbc9113f3fb160dc07c61ad9239ef264c17e449eba3de343dc2", size = 470776, upload-time = "2025-10-08T17:28:13.484Z" }, + { url = "https://files.pythonhosted.org/packages/36/60/51178b093ffc4e2ef3381013a67223e7d56224434fba80047249f4a84b26/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a608d3a1d4fa4acdc5082168a54513cff91f47764cef435e81a483452f5f7647", size = 380862, upload-time = "2025-10-08T17:28:14.747Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e3/1cb6c161335e2ae7d711ecfb007a31a3936603626e347c13e5e53b7c7cf8/ormsgpack-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:97217b4f7f599ba45916b9c4c4b1d5656e8e2a4d91e2e191d72a7569d3c30923", size = 112058, upload-time = "2025-10-08T17:28:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/90164d00e8e94b48eff8a17bc2f4be6b71ae356a00904bc69d5e8afe80fb/ormsgpack-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c7be823f47d8e36648d4bc90634b93f02b7d7cc7480081195f34767e86f181fb", size = 367964, upload-time = "2025-10-08T17:28:16.778Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c2/fb6331e880a3446c1341e72c77bd5a46da3e92a8e2edf7ea84a4c6c14fff/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68accf15d1b013812755c0eb7a30e1fc2f81eb603a1a143bf0cda1b301cfa797", size = 195209, upload-time = "2025-10-08T17:28:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/50/4943fb5df8cc02da6b7b1ee2c2a7fb13aebc9f963d69280b1bb02b1fb178/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:805d06fb277d9a4e503c0c707545b49cde66cbb2f84e5cf7c58d81dfc20d8658", size = 205869, upload-time = "2025-10-08T17:28:19.01Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/e7e06835bfea9adeef43915143ce818098aecab0cbd3df584815adf3e399/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1e57cdf003e77acc43643bda151dc01f97147a64b11cdee1380bb9698a7601c", size = 207391, upload-time = "2025-10-08T17:28:20.352Z" }, + { url = "https://files.pythonhosted.org/packages/33/f0/f28a19e938a14ec223396e94f4782fbcc023f8c91f2ab6881839d3550f32/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:37fc05bdaabd994097c62e2f3e08f66b03f856a640ede6dc5ea340bd15b77f4d", size = 377081, upload-time = "2025-10-08T17:28:21.926Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e3/73d1d7287637401b0b6637e30ba9121e1aa1d9f5ea185ed9834ca15d512c/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a6e9db6c73eb46b2e4d97bdffd1368a66f54e6806b563a997b19c004ef165e1d", size = 470779, upload-time = 
"2025-10-08T17:28:22.993Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/7ba7f9721e766dd0dfe4cedf444439447212abffe2d2f4538edeeec8ccbd/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9c44eae5ac0196ffc8b5ed497c75511056508f2303fa4d36b208eb820cf209e", size = 380865, upload-time = "2025-10-08T17:28:24.012Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7d/bb92a0782bbe0626c072c0320001410cf3f6743ede7dc18f034b1a18edef/ormsgpack-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:11d0dfaf40ae7c6de4f7dbd1e4892e2e6a55d911ab1774357c481158d17371e4", size = 112058, upload-time = "2025-10-08T17:28:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/28/1a/f07c6f74142815d67e1d9d98c5b2960007100408ade8242edac96d5d1c73/ormsgpack-1.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:0c63a3f7199a3099c90398a1bdf0cb577b06651a442dc5efe67f2882665e5b02", size = 105894, upload-time = "2025-10-08T17:28:25.93Z" }, + { url = "https://files.pythonhosted.org/packages/1e/16/2805ebfb3d2cbb6c661b5fae053960fc90a2611d0d93e2207e753e836117/ormsgpack-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3434d0c8d67de27d9010222de07fb6810fb9af3bb7372354ffa19257ac0eb83b", size = 368474, upload-time = "2025-10-08T17:28:27.532Z" }, + { url = "https://files.pythonhosted.org/packages/6f/39/6afae47822dca0ce4465d894c0bbb860a850ce29c157882dbdf77a5dd26e/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2da5bd097e8dbfa4eb0d4ccfe79acd6f538dee4493579e2debfe4fc8f4ca89b", size = 195321, upload-time = "2025-10-08T17:28:28.573Z" }, + { url = "https://files.pythonhosted.org/packages/f6/54/11eda6b59f696d2f16de469bfbe539c9f469c4b9eef5a513996b5879c6e9/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fdbaa0a5a8606a486960b60c24f2d5235d30ac7a8b98eeaea9854bffef14dc3d", size = 206036, upload-time = "2025-10-08T17:28:29.785Z" }, + { url = "https://files.pythonhosted.org/packages/1e/86/890430f704f84c4699ddad61c595d171ea2fd77a51fbc106f83981e83939/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3682f24f800c1837017ee90ce321086b2cbaef88db7d4cdbbda1582aa6508159", size = 207615, upload-time = "2025-10-08T17:28:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b9/77383e16c991c0ecb772205b966fc68d9c519e0b5f9c3913283cbed30ffe/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fcca21202bb05ccbf3e0e92f560ee59b9331182e4c09c965a28155efbb134993", size = 377195, upload-time = "2025-10-08T17:28:32.436Z" }, + { url = "https://files.pythonhosted.org/packages/20/e2/15f9f045d4947f3c8a5e0535259fddf027b17b1215367488b3565c573b9d/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c30e5c4655ba46152d722ec7468e8302195e6db362ec1ae2c206bc64f6030e43", size = 470960, upload-time = "2025-10-08T17:28:33.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/403ce188c4c495bc99dff921a0ad3d9d352dd6d3c4b629f3638b7f0cf79b/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7138a341f9e2c08c59368f03d3be25e8b87b3baaf10d30fb1f6f6b52f3d47944", size = 381174, upload-time = "2025-10-08T17:28:34.781Z" }, + { url = "https://files.pythonhosted.org/packages/14/a8/94c94bc48c68da4374870a851eea03fc5a45eb041182ad4c5ed9acfc05a4/ormsgpack-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4bd8589b78a11026d47f4edf13c1ceab9088bb12451f34396afe6497db28a27", size = 112314, upload-time = 
"2025-10-08T17:28:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/19/d0/aa4cf04f04e4cc180ce7a8d8ddb5a7f3af883329cbc59645d94d3ba157a5/ormsgpack-1.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:e5e746a1223e70f111d4001dab9585ac8639eee8979ca0c8db37f646bf2961da", size = 106072, upload-time = "2025-10-08T17:28:37.518Z" }, + { url = "https://files.pythonhosted.org/packages/8b/35/e34722edb701d053cf2240f55974f17b7dbfd11fdef72bd2f1835bcebf26/ormsgpack-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e7b36ab7b45cb95217ae1f05f1318b14a3e5ef73cb00804c0f06233f81a14e8", size = 368502, upload-time = "2025-10-08T17:28:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6a/c2fc369a79d6aba2aa28c8763856c95337ac7fcc0b2742185cd19397212a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43402d67e03a9a35cc147c8c03f0c377cad016624479e1ee5b879b8425551484", size = 195344, upload-time = "2025-10-08T17:28:39.554Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6a/0f8e24b7489885534c1a93bdba7c7c434b9b8638713a68098867db9f254c/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:64fd992f932764d6306b70ddc755c1bc3405c4c6a69f77a36acf7af1c8f5ada4", size = 206045, upload-time = "2025-10-08T17:28:40.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/71/8b460ba264f3c6f82ef5b1920335720094e2bd943057964ce5287d6df83a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0362fb7fe4a29c046c8ea799303079a09372653a1ce5a5a588f3bbb8088368d0", size = 207641, upload-time = "2025-10-08T17:28:41.736Z" }, + { url = "https://files.pythonhosted.org/packages/50/cf/f369446abaf65972424ed2651f2df2b7b5c3b735c93fc7fa6cfb81e34419/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:de2f7a65a9d178ed57be49eba3d0fc9b833c32beaa19dbd4ba56014d3c20b152", size = 377211, upload-time = "2025-10-08T17:28:43.12Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3f/948bb0047ce0f37c2efc3b9bb2bcfdccc61c63e0b9ce8088d4903ba39dcf/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f38cfae95461466055af966fc922d06db4e1654966385cda2828653096db34da", size = 470973, upload-time = "2025-10-08T17:28:44.465Z" }, + { url = "https://files.pythonhosted.org/packages/31/a4/92a8114d1d017c14aaa403445060f345df9130ca532d538094f38e535988/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c88396189d238f183cea7831b07a305ab5c90d6d29b53288ae11200bd956357b", size = 381161, upload-time = "2025-10-08T17:28:46.063Z" }, + { url = "https://files.pythonhosted.org/packages/d0/64/5b76447da654798bfcfdfd64ea29447ff2b7f33fe19d0e911a83ad5107fc/ormsgpack-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:5403d1a945dd7c81044cebeca3f00a28a0f4248b33242a5d2d82111628043725", size = 112321, upload-time = "2025-10-08T17:28:47.393Z" }, + { url = "https://files.pythonhosted.org/packages/46/5e/89900d06db9ab81e7ec1fd56a07c62dfbdcda398c435718f4252e1dc52a0/ormsgpack-1.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c57357b8d43b49722b876edf317bdad9e6d52071b523fdd7394c30cd1c67d5a0", size = 106084, upload-time = "2025-10-08T17:28:48.305Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0b/c659e8657085c8c13f6a0224789f422620cef506e26573b5434defe68483/ormsgpack-1.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d390907d90fd0c908211592c485054d7a80990697ef4dff4e436ac18e1aab98a", 
size = 368497, upload-time = "2025-10-08T17:28:49.297Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0e/451e5848c7ed56bd287e8a2b5cb5926e54466f60936e05aec6cb299f9143/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6153c2e92e789509098e04c9aa116b16673bd88ec78fbe0031deeb34ab642d10", size = 195385, upload-time = "2025-10-08T17:28:50.314Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/90f78cbbe494959f2439c2ec571f08cd3464c05a6a380b0d621c622122a9/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2b2c2a065a94d742212b2018e1fecd8f8d72f3c50b53a97d1f407418093446d", size = 206114, upload-time = "2025-10-08T17:28:51.336Z" }, + { url = "https://files.pythonhosted.org/packages/fb/db/34163f4c0923bea32dafe42cd878dcc66795a3e85669bc4b01c1e2b92a7b/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110e65b5340f3d7ef8b0009deae3c6b169437e6b43ad5a57fd1748085d29d2ac", size = 207679, upload-time = "2025-10-08T17:28:53.627Z" }, + { url = "https://files.pythonhosted.org/packages/b6/14/04ee741249b16f380a9b4a0cc19d4134d0b7c74bab27a2117da09e525eb9/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c27e186fca96ab34662723e65b420919910acbbc50fc8e1a44e08f26268cb0e0", size = 377237, upload-time = "2025-10-08T17:28:56.12Z" }, + { url = "https://files.pythonhosted.org/packages/89/ff/53e588a6aaa833237471caec679582c2950f0e7e1a8ba28c1511b465c1f4/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d56b1f877c13d499052d37a3db2378a97d5e1588d264f5040b3412aee23d742c", size = 471021, upload-time = "2025-10-08T17:28:57.299Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f9/f20a6d9ef2be04da3aad05e8f5699957e9a30c6d5c043a10a296afa7e890/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c88e28cd567c0a3269f624b4ade28142d5e502c8e826115093c572007af5be0a", size = 381205, upload-time = "2025-10-08T17:28:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/64/96c07d084b479ac8b7821a77ffc8d3f29d8b5c95ebfdf8db1c03dff02762/ormsgpack-1.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:8811160573dc0a65f62f7e0792c4ca6b7108dfa50771edb93f9b84e2d45a08ae", size = 112374, upload-time = "2025-10-08T17:29:00Z" }, + { url = "https://files.pythonhosted.org/packages/88/a5/5dcc18b818d50213a3cadfe336bb6163a102677d9ce87f3d2f1a1bee0f8c/ormsgpack-1.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:23e30a8d3c17484cf74e75e6134322255bd08bc2b5b295cc9c442f4bae5f3c2d", size = 106056, upload-time = "2025-10-08T17:29:01.29Z" }, + { url = "https://files.pythonhosted.org/packages/19/2b/776d1b411d2be50f77a6e6e94a25825cca55dcacfe7415fd691a144db71b/ormsgpack-1.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2905816502adfaf8386a01dd85f936cd378d243f4f5ee2ff46f67f6298dc90d5", size = 368661, upload-time = "2025-10-08T17:29:02.382Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0c/81a19e6115b15764db3d241788f9fac093122878aaabf872cc545b0c4650/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c04402fb9a0a9b9f18fbafd6d5f8398ee99b3ec619fb63952d3a954bc9d47daa", size = 195539, upload-time = "2025-10-08T17:29:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/97/86/e5b50247a61caec5718122feb2719ea9d451d30ac0516c288c1dbc6408e8/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a025ec07ac52056ecfd9e57b5cbc6fff163f62cb9805012b56cda599157f8ef2", size = 207718, upload-time = "2025-10-08T17:29:04.545Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -809,7 +942,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -817,96 +950,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = 
"2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = 
"sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { 
url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", 
size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = 
"2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = 
"2025-10-14T10:21:00.006Z" }, + { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, 
upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = "https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, ] [[package]] @@ -1327,6 +1487,124 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = "2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = 
"2025-10-02T14:34:11.618Z" }, + { url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + 
{ url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", 
size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, 
upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + [[package]] name = "zstandard" version = "0.25.0" diff --git a/libs/core/README.md b/libs/core/README.md index 9aacc631c36..80cfbe0d8ec 100644 --- a/libs/core/README.md +++ b/libs/core/README.md @@ -1,7 +1,14 @@ # 🦜🍎️ LangChain Core -[![PyPI - License](https://img.shields.io/pypi/l/langchain-core?style=flat-square)](https://opensource.org/licenses/MIT) +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-core?label=%20)](https://pypi.org/project/langchain-core/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-core)](https://opensource.org/licenses/MIT) [![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-core)](https://pypistats.org/packages/langchain-core) 
+[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +To help you ship LangChain apps to production faster, check out [LangSmith](https://smith.langchain.com). +[LangSmith](https://smith.langchain.com) is a unified developer platform for building, testing, and monitoring LLM applications. ## Quick Install @@ -9,16 +16,14 @@ pip install langchain-core ``` -## What is it? +## πŸ€” What is this? -LangChain Core contains the base abstractions that power the the LangChain ecosystem. +LangChain Core contains the base abstractions that power the LangChain ecosystem. These abstractions are designed to be as modular and simple as possible. The benefit of having these abstractions is that any provider can implement the required interface and then easily be used in the rest of the LangChain ecosystem. -For full documentation see the [API reference](https://reference.langchain.com/python/). - ## ⛰️ Why build on top of LangChain Core? The LangChain ecosystem is built on top of `langchain-core`. Some of the benefits: @@ -27,12 +32,16 @@ The LangChain ecosystem is built on top of `langchain-core`. Some of the benefit - **Stability**: We are committed to a stable versioning scheme, and will communicate any breaking changes with advance notice and version bumps. - **Battle-tested**: Core components have the largest install base in the LLM ecosystem, and are used in production by many companies. +## πŸ“– Documentation + +For full documentation, see the [API reference](https://reference.langchain.com/python/langchain_core/). + ## πŸ“• Releases & Versioning -See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning Policy](https://docs.langchain.com/oss/python/versioning). +See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning](https://docs.langchain.com/oss/python/versioning) policies. ## πŸ’ Contributing As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. -For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing). +For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview). diff --git a/libs/core/langchain_core/agents.py b/libs/core/langchain_core/agents.py index 62a44623972..4a020a99c95 100644 --- a/libs/core/langchain_core/agents.py +++ b/libs/core/langchain_core/agents.py @@ -5,12 +5,10 @@ !!! warning New agents should be built using the - [langgraph library](https://github.com/langchain-ai/langgraph), which provides a + [`langchain` library](https://pypi.org/project/langchain/), which provides a simpler and more flexible way to define agents. - Please see the - [migration guide](https://python.langchain.com/docs/how_to/migrate_agent/) for - information on how to migrate existing agents to modern langgraph agents. + See docs on [building agents](https://docs.langchain.com/oss/python/langchain/agents). Agents use language models to choose a sequence of actions to take. @@ -54,37 +52,39 @@ class AgentAction(Serializable): """The input to pass in to the Tool.""" log: str """Additional information to log about the action. 
- This log can be used in a few ways. First, it can be used to audit - what exactly the LLM predicted to lead to this (tool, tool_input). - Second, it can be used in future iterations to show the LLMs prior - thoughts. This is useful when (tool, tool_input) does not contain - full information about the LLM prediction (for example, any `thought` - before the tool/tool_input).""" + + This log can be used in a few ways. First, it can be used to audit what exactly the + LLM predicted to lead to this `(tool, tool_input)`. + + Second, it can be used in future iterations to show the LLMs prior thoughts. This is + useful when `(tool, tool_input)` does not contain full information about the LLM + prediction (for example, any `thought` before the tool/tool_input). + """ type: Literal["AgentAction"] = "AgentAction" # Override init to support instantiation by position for backward compat. def __init__(self, tool: str, tool_input: str | dict, log: str, **kwargs: Any): - """Create an AgentAction. + """Create an `AgentAction`. Args: tool: The name of the tool to execute. - tool_input: The input to pass in to the Tool. + tool_input: The input to pass in to the `Tool`. log: Additional information to log about the action. """ super().__init__(tool=tool, tool_input=tool_input, log=log, **kwargs) @classmethod def is_lc_serializable(cls) -> bool: - """AgentAction is serializable. + """`AgentAction` is serializable. Returns: - True + `True` """ return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "agent"]` @@ -100,19 +100,23 @@ class AgentAction(Serializable): class AgentActionMessageLog(AgentAction): """Representation of an action to be executed by an agent. - This is similar to AgentAction, but includes a message log consisting of - chat messages. This is useful when working with ChatModels, and is used - to reconstruct conversation history from the agent's perspective. + This is similar to `AgentAction`, but includes a message log consisting of + chat messages. + + This is useful when working with `ChatModels`, and is used to reconstruct + conversation history from the agent's perspective. """ message_log: Sequence[BaseMessage] - """Similar to log, this can be used to pass along extra - information about what exact messages were predicted by the LLM - before parsing out the (tool, tool_input). This is again useful - if (tool, tool_input) cannot be used to fully recreate the LLM - prediction, and you need that LLM prediction (for future agent iteration). + """Similar to log, this can be used to pass along extra information about what exact + messages were predicted by the LLM before parsing out the `(tool, tool_input)`. + + This is again useful if `(tool, tool_input)` cannot be used to fully recreate the + LLM prediction, and you need that LLM prediction (for future agent iteration). + Compared to `log`, this is useful when the underlying LLM is a - ChatModel (and therefore returns messages rather than a string).""" + chat model (and therefore returns messages rather than a string). + """ # Ignoring type because we're overriding the type from AgentAction. # And this is the correct thing to do in this case. # The type literal is used for serialization purposes. 
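To make the legacy agent data classes documented in this `agents.py` hunk concrete, here is a minimal sketch of constructing them directly. Only `langchain_core` is assumed to be installed; the tool name, input, and log strings are illustrative placeholders, not part of the diff.

```python
from langchain_core.agents import AgentAction, AgentFinish

# Positional instantiation is kept for backward compatibility, as the
# overridden __init__ above documents: AgentAction(tool, tool_input, log).
action = AgentAction(
    "search",                      # hypothetical tool name
    {"query": "LangChain Core"},   # tool_input may be a str or a dict
    log="Thought: I should look this up.\nAction: search",
)

# AgentFinish carries the parsed return value plus the raw LLM text in `log`,
# e.g. keeping "Final Answer: 2" around for debugging/observability.
finish = AgentFinish(return_values={"output": "2"}, log="Final Answer: 2")

print(action.tool, action.tool_input)
print(finish.return_values["output"])
```

In practice these objects are produced by agent output parsers rather than built by hand; the sketch only illustrates the fields the docstrings above describe.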
@@ -120,12 +124,12 @@ class AgentActionMessageLog(AgentAction): class AgentStep(Serializable): - """Result of running an AgentAction.""" + """Result of running an `AgentAction`.""" action: AgentAction - """The AgentAction that was executed.""" + """The `AgentAction` that was executed.""" observation: Any - """The result of the AgentAction.""" + """The result of the `AgentAction`.""" @property def messages(self) -> Sequence[BaseMessage]: @@ -134,19 +138,22 @@ class AgentStep(Serializable): class AgentFinish(Serializable): - """Final return value of an ActionAgent. + """Final return value of an `ActionAgent`. - Agents return an AgentFinish when they have reached a stopping condition. + Agents return an `AgentFinish` when they have reached a stopping condition. """ return_values: dict """Dictionary of return values.""" log: str """Additional information to log about the return value. + This is used to pass along the full LLM prediction, not just the parsed out - return value. For example, if the full LLM prediction was - `Final Answer: 2` you may want to just return `2` as a return value, but pass - along the full string as a `log` (for debugging or observability purposes). + return value. + + For example, if the full LLM prediction was `Final Answer: 2` you may want to just + return `2` as a return value, but pass along the full string as a `log` (for + debugging or observability purposes). """ type: Literal["AgentFinish"] = "AgentFinish" @@ -156,12 +163,12 @@ class AgentFinish(Serializable): @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "agent"]` @@ -204,7 +211,7 @@ def _convert_agent_observation_to_messages( observation: Observation to convert to a message. Returns: - AIMessage that corresponds to the original tool invocation. + `AIMessage` that corresponds to the original tool invocation. """ if isinstance(agent_action, AgentActionMessageLog): return [_create_function_message(agent_action, observation)] @@ -227,7 +234,7 @@ def _create_function_message( observation: the result of the tool invocation. Returns: - FunctionMessage that corresponds to the original tool invocation. + `FunctionMessage` that corresponds to the original tool invocation. """ if not isinstance(observation, str): try: diff --git a/libs/core/langchain_core/caches.py b/libs/core/langchain_core/caches.py index 9db037e913b..86139c5d821 100644 --- a/libs/core/langchain_core/caches.py +++ b/libs/core/langchain_core/caches.py @@ -1,18 +1,17 @@ -"""Cache classes. +"""Optional caching layer for language models. -!!! warning - Beta Feature! +Distinct from provider-based [prompt caching](https://docs.langchain.com/oss/python/langchain/models#prompt-caching). -**Cache** provides an optional caching layer for LLMs. +!!! warning "Beta feature" + This is a beta feature. Please be wary of deploying experimental code to production + unless you've taken appropriate precautions. -Cache is useful for two reasons: +A cache is useful for two reasons: -- It can save you money by reducing the number of API calls you make to the LLM +1. It can save you money by reducing the number of API calls you make to the LLM provider if you're often requesting the same completion multiple times. 
-- It can speed up your application by reducing the number of API calls you make - to the LLM provider. - -Cache directly competes with Memory. See documentation for Pros and Cons. +2. It can speed up your application by reducing the number of API calls you make to the + LLM provider. """ from __future__ import annotations @@ -34,8 +33,8 @@ class BaseCache(ABC): The cache interface consists of the following methods: - - lookup: Look up a value based on a prompt and llm_string. - - update: Update the cache based on a prompt and llm_string. + - lookup: Look up a value based on a prompt and `llm_string`. + - update: Update the cache based on a prompt and `llm_string`. - clear: Clear the cache. In addition, the cache interface provides an async version of each method. @@ -47,43 +46,46 @@ class BaseCache(ABC): @abstractmethod def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None: - """Look up based on prompt and llm_string. + """Look up based on `prompt` and `llm_string`. A cache implementation is expected to generate a key from the 2-tuple - of prompt and llm_string (e.g., by concatenating them with a delimiter). + of `prompt` and `llm_string` (e.g., by concatenating them with a delimiter). Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. + This is used to capture the invocation parameters of the LLM (e.g., model name, temperature, stop tokens, max tokens, etc.). - These invocation parameters are serialized into a string - representation. + + These invocation parameters are serialized into a string representation. Returns: - On a cache miss, return None. On a cache hit, return the cached value. - The cached value is a list of Generations (or subclasses). + On a cache miss, return `None`. On a cache hit, return the cached value. + The cached value is a list of `Generation` (or subclasses). """ @abstractmethod def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: - """Update cache based on prompt and llm_string. + """Update cache based on `prompt` and `llm_string`. The prompt and llm_string are used to generate a key for the cache. The key should match that of the lookup method. Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. + This is used to capture the invocation parameters of the LLM (e.g., model name, temperature, stop tokens, max tokens, etc.). + These invocation parameters are serialized into a string representation. - return_val: The value to be cached. The value is a list of Generations + return_val: The value to be cached. The value is a list of `Generation` (or subclasses). """ @@ -92,45 +94,49 @@ class BaseCache(ABC): """Clear cache that can take additional keyword arguments.""" async def alookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None: - """Async look up based on prompt and llm_string. + """Async look up based on `prompt` and `llm_string`. 
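The `BaseCache` interface described in these hunks is keyed on the `(prompt, llm_string)` pair and stores a list of `Generation` objects on a hit, returning `None` on a miss. A minimal sketch of a conforming subclass, assuming only `langchain_core`; the class name and delimiter choice are illustrative.

```python
from typing import Any

from langchain_core.caches import RETURN_VAL_TYPE, BaseCache


class DictCache(BaseCache):
    """Toy cache keyed on a concatenation of prompt and llm_string."""

    def __init__(self) -> None:
        self._store: dict[str, RETURN_VAL_TYPE] = {}

    def _key(self, prompt: str, llm_string: str) -> str:
        # Any stable derivation of the 2-tuple works; a delimiter join is simplest.
        return f"{prompt}\x1e{llm_string}"

    def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None:
        # Cache miss -> None; cache hit -> the cached list of Generation objects.
        return self._store.get(self._key(prompt, llm_string))

    def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None:
        self._store[self._key(prompt, llm_string)] = return_val

    def clear(self, **kwargs: Any) -> None:
        self._store.clear()
```

Because `alookup`/`aupdate` default to delegating to the sync methods via `run_in_executor`, as the hunk shows, a subclass like this only needs to provide the three sync methods.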
A cache implementation is expected to generate a key from the 2-tuple - of prompt and llm_string (e.g., by concatenating them with a delimiter). + of `prompt` and `llm_string` (e.g., by concatenating them with a delimiter). Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. + This is used to capture the invocation parameters of the LLM (e.g., model name, temperature, stop tokens, max tokens, etc.). + These invocation parameters are serialized into a string representation. Returns: - On a cache miss, return None. On a cache hit, return the cached value. - The cached value is a list of Generations (or subclasses). + On a cache miss, return `None`. On a cache hit, return the cached value. + The cached value is a list of `Generation` (or subclasses). """ return await run_in_executor(None, self.lookup, prompt, llm_string) async def aupdate( self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE ) -> None: - """Async update cache based on prompt and llm_string. + """Async update cache based on `prompt` and `llm_string`. The prompt and llm_string are used to generate a key for the cache. The key should match that of the look up method. Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. + This is used to capture the invocation parameters of the LLM (e.g., model name, temperature, stop tokens, max tokens, etc.). + These invocation parameters are serialized into a string representation. - return_val: The value to be cached. The value is a list of Generations + return_val: The value to be cached. The value is a list of `Generation` (or subclasses). """ return await run_in_executor(None, self.update, prompt, llm_string, return_val) @@ -150,10 +156,9 @@ class InMemoryCache(BaseCache): maxsize: The maximum number of items to store in the cache. If `None`, the cache has no maximum size. If the cache exceeds the maximum size, the oldest items are removed. - Default is None. Raises: - ValueError: If maxsize is less than or equal to 0. + ValueError: If `maxsize` is less than or equal to `0`. """ self._cache: dict[tuple[str, str], RETURN_VAL_TYPE] = {} if maxsize is not None and maxsize <= 0: @@ -162,28 +167,28 @@ class InMemoryCache(BaseCache): self._maxsize = maxsize def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None: - """Look up based on prompt and llm_string. + """Look up based on `prompt` and `llm_string`. Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. Returns: - On a cache miss, return None. On a cache hit, return the cached value. + On a cache miss, return `None`. On a cache hit, return the cached value. 
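For the built-in `InMemoryCache` documented here, usage amounts to calling `update` and `lookup` with the same `(prompt, llm_string)` pair. A short sketch; the prompt and `llm_string` values are placeholders.

```python
from langchain_core.caches import InMemoryCache
from langchain_core.outputs import Generation

# maxsize=2: once a third entry is added, the oldest entry is evicted.
# A maxsize <= 0 raises ValueError, per the constructor docstring above.
cache = InMemoryCache(maxsize=2)

cache.update("What is 2 + 2?", "fake-llm-config", [Generation(text="4")])

print(cache.lookup("What is 2 + 2?", "fake-llm-config"))   # [Generation(text="4")]
print(cache.lookup("unseen prompt", "fake-llm-config"))     # None -> cache miss
```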
""" return self._cache.get((prompt, llm_string), None) def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: - """Update cache based on prompt and llm_string. + """Update cache based on `prompt` and `llm_string`. Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. - return_val: The value to be cached. The value is a list of Generations + return_val: The value to be cached. The value is a list of `Generation` (or subclasses). """ if self._maxsize is not None and len(self._cache) == self._maxsize: @@ -196,30 +201,30 @@ class InMemoryCache(BaseCache): self._cache = {} async def alookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None: - """Async look up based on prompt and llm_string. + """Async look up based on `prompt` and `llm_string`. Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. Returns: - On a cache miss, return None. On a cache hit, return the cached value. + On a cache miss, return `None`. On a cache hit, return the cached value. """ return self.lookup(prompt, llm_string) async def aupdate( self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE ) -> None: - """Async update cache based on prompt and llm_string. + """Async update cache based on `prompt` and `llm_string`. Args: - prompt: a string representation of the prompt. - In the case of a Chat model, the prompt is a non-trivial + prompt: A string representation of the prompt. + In the case of a chat model, the prompt is a non-trivial serialization of the prompt into the language model. llm_string: A string representation of the LLM configuration. - return_val: The value to be cached. The value is a list of Generations + return_val: The value to be cached. The value is a list of `Generation` (or subclasses). """ self.update(prompt, llm_string, return_val) diff --git a/libs/core/langchain_core/callbacks/base.py b/libs/core/langchain_core/callbacks/base.py index 4265902d0c3..af455bd9700 100644 --- a/libs/core/langchain_core/callbacks/base.py +++ b/libs/core/langchain_core/callbacks/base.py @@ -420,8 +420,6 @@ class RunManagerMixin: (includes inherited tags). metadata: The metadata associated with the custom event (includes inherited metadata). - - !!! version-added "Added in version 0.2.15" """ @@ -882,8 +880,6 @@ class AsyncCallbackHandler(BaseCallbackHandler): (includes inherited tags). metadata: The metadata associated with the custom event (includes inherited metadata). - - !!! version-added "Added in version 0.2.15" """ @@ -1001,7 +997,7 @@ class BaseCallbackManager(CallbackManagerMixin): Args: handler: The handler to add. - inherit: Whether to inherit the handler. Default is True. + inherit: Whether to inherit the handler. """ if handler not in self.handlers: self.handlers.append(handler) @@ -1028,7 +1024,7 @@ class BaseCallbackManager(CallbackManagerMixin): Args: handlers: The handlers to set. - inherit: Whether to inherit the handlers. Default is True. + inherit: Whether to inherit the handlers. 
""" self.handlers = [] self.inheritable_handlers = [] @@ -1044,7 +1040,7 @@ class BaseCallbackManager(CallbackManagerMixin): Args: handler: The handler to set. - inherit: Whether to inherit the handler. Default is True. + inherit: Whether to inherit the handler. """ self.set_handlers([handler], inherit=inherit) @@ -1057,7 +1053,7 @@ class BaseCallbackManager(CallbackManagerMixin): Args: tags: The tags to add. - inherit: Whether to inherit the tags. Default is True. + inherit: Whether to inherit the tags. """ for tag in tags: if tag in self.tags: @@ -1087,7 +1083,7 @@ class BaseCallbackManager(CallbackManagerMixin): Args: metadata: The metadata to add. - inherit: Whether to inherit the metadata. Default is True. + inherit: Whether to inherit the metadata. """ self.metadata.update(metadata) if inherit: diff --git a/libs/core/langchain_core/callbacks/file.py b/libs/core/langchain_core/callbacks/file.py index e1bfc05dcb8..7f0c82531c1 100644 --- a/libs/core/langchain_core/callbacks/file.py +++ b/libs/core/langchain_core/callbacks/file.py @@ -47,7 +47,7 @@ class FileCallbackHandler(BaseCallbackHandler): Args: filename: The file path to write to. mode: The file open mode. Defaults to `'a'` (append). - color: Default color for text output. Defaults to `None`. + color: Default color for text output. !!! note When not used as a context manager, a deprecation warning will be issued @@ -64,7 +64,7 @@ class FileCallbackHandler(BaseCallbackHandler): Args: filename: Path to the output file. mode: File open mode (e.g., `'w'`, `'a'`, `'x'`). Defaults to `'a'`. - color: Default text color for output. Defaults to `None`. + color: Default text color for output. """ self.filename = filename @@ -132,7 +132,7 @@ class FileCallbackHandler(BaseCallbackHandler): Args: text: The text to write to the file. color: Optional color for the text. Defaults to `self.color`. - end: String appended after the text. Defaults to `""`. + end: String appended after the text. file: Optional file to write to. Defaults to `self.file`. Raises: @@ -239,7 +239,7 @@ class FileCallbackHandler(BaseCallbackHandler): text: The text to write. color: Color override for this specific output. If `None`, uses `self.color`. - end: String appended after the text. Defaults to `""`. + end: String appended after the text. **kwargs: Additional keyword arguments. """ diff --git a/libs/core/langchain_core/callbacks/manager.py b/libs/core/langchain_core/callbacks/manager.py index 428bd7cdb23..29db63609db 100644 --- a/libs/core/langchain_core/callbacks/manager.py +++ b/libs/core/langchain_core/callbacks/manager.py @@ -79,13 +79,13 @@ def trace_as_chain_group( Args: group_name: The name of the chain group. - callback_manager: The callback manager to use. Defaults to `None`. - inputs: The inputs to the chain group. Defaults to `None`. - project_name: The name of the project. Defaults to `None`. - example_id: The ID of the example. Defaults to `None`. + callback_manager: The callback manager to use. + inputs: The inputs to the chain group. + project_name: The name of the project. + example_id: The ID of the example. run_id: The ID of the run. - tags: The inheritable tags to apply to all runs. Defaults to `None`. - metadata: The metadata to apply to all runs. Defaults to `None`. + tags: The inheritable tags to apply to all runs. + metadata: The metadata to apply to all runs. !!! note Must have `LANGCHAIN_TRACING_V2` env var set to true to see the trace in @@ -155,13 +155,13 @@ async def atrace_as_chain_group( Args: group_name: The name of the chain group. 
callback_manager: The async callback manager to use, - which manages tracing and other callback behavior. Defaults to `None`. - inputs: The inputs to the chain group. Defaults to `None`. - project_name: The name of the project. Defaults to `None`. - example_id: The ID of the example. Defaults to `None`. + which manages tracing and other callback behavior. + inputs: The inputs to the chain group. + project_name: The name of the project. + example_id: The ID of the example. run_id: The ID of the run. - tags: The inheritable tags to apply to all runs. Defaults to `None`. - metadata: The metadata to apply to all runs. Defaults to `None`. + tags: The inheritable tags to apply to all runs. + metadata: The metadata to apply to all runs. Yields: The async callback manager for the chain group. @@ -229,7 +229,24 @@ def shielded(func: Func) -> Func: @functools.wraps(func) async def wrapped(*args: Any, **kwargs: Any) -> Any: - return await asyncio.shield(func(*args, **kwargs)) + # Capture the current context to preserve context variables + ctx = copy_context() + + # Create the coroutine + coro = func(*args, **kwargs) + + # For Python 3.11+, create task with explicit context + # For older versions, fallback to original behavior + try: + # Create a task with the captured context to preserve context variables + task = asyncio.create_task(coro, context=ctx) # type: ignore[call-arg, unused-ignore] + # `call-arg` used to not fail 3.9 or 3.10 tests + return await asyncio.shield(task) + except TypeError: + # Python < 3.11 fallback - create task normally then shield + # This won't preserve context perfectly but is better than nothing + task = asyncio.create_task(coro) + return await asyncio.shield(task) return cast("Func", wrapped) @@ -462,11 +479,11 @@ class BaseRunManager(RunManagerMixin): run_id: The ID of the run. handlers: The list of handlers. inheritable_handlers: The list of inheritable handlers. - parent_run_id: The ID of the parent run. Defaults to `None`. - tags: The list of tags. Defaults to `None`. - inheritable_tags: The list of inheritable tags. Defaults to `None`. - metadata: The metadata. Defaults to `None`. - inheritable_metadata: The inheritable metadata. Defaults to `None`. + parent_run_id: The ID of the parent run. + tags: The list of tags. + inheritable_tags: The list of inheritable tags. + metadata: The metadata. + inheritable_metadata: The inheritable metadata. """ self.run_id = run_id @@ -557,7 +574,7 @@ class ParentRunManager(RunManager): """Get a child callback manager. Args: - tag: The tag for the child callback manager. Defaults to `None`. + tag: The tag for the child callback manager. Returns: The child callback manager. @@ -641,7 +658,7 @@ class AsyncParentRunManager(AsyncRunManager): """Get a child callback manager. Args: - tag: The tag for the child callback manager. Defaults to `None`. + tag: The tag for the child callback manager. Returns: The child callback manager. @@ -1303,7 +1320,7 @@ class CallbackManager(BaseCallbackManager): Args: serialized: The serialized LLM. prompts: The list of prompts. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. **kwargs: Additional keyword arguments. Returns: @@ -1354,7 +1371,7 @@ class CallbackManager(BaseCallbackManager): Args: serialized: The serialized LLM. messages: The list of messages. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. **kwargs: Additional keyword arguments. 
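The `shielded` change above captures the caller's context before shielding. The standalone sketch below mirrors that pattern (names are illustrative) so a context variable set by the caller remains visible inside the shielded coroutine, with a fallback for Pythons whose `asyncio.create_task` lacks the `context` parameter.

```python
import asyncio
import functools
from contextvars import ContextVar, copy_context

request_id: ContextVar[str] = ContextVar("request_id", default="unset")


def shield_with_context(func):
    """Sketch of the same pattern: shield a task created in the caller's context."""

    @functools.wraps(func)
    async def wrapped(*args, **kwargs):
        ctx = copy_context()  # snapshot the caller's context variables
        coro = func(*args, **kwargs)
        try:
            task = asyncio.create_task(coro, context=ctx)  # Python 3.11+
        except TypeError:
            task = asyncio.create_task(coro)  # older Pythons: no `context` kwarg
        return await asyncio.shield(task)

    return wrapped


@shield_with_context
async def whoami() -> str:
    return request_id.get()


async def main() -> None:
    request_id.set("req-123")
    print(await whoami())  # prints "req-123"


asyncio.run(main())
```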
Returns: @@ -1408,7 +1425,7 @@ class CallbackManager(BaseCallbackManager): Args: serialized: The serialized chain. inputs: The inputs to the chain. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. **kwargs: Additional keyword arguments. Returns: @@ -1457,8 +1474,8 @@ class CallbackManager(BaseCallbackManager): serialized: Serialized representation of the tool. input_str: The input to the tool as a string. Non-string inputs are cast to strings. - run_id: ID for the run. Defaults to `None`. - parent_run_id: The ID of the parent run. Defaults to `None`. + run_id: ID for the run. + parent_run_id: The ID of the parent run. inputs: The original input to the tool if provided. Recommended for usage instead of input_str when the original input is needed. @@ -1512,8 +1529,8 @@ class CallbackManager(BaseCallbackManager): Args: serialized: The serialized retriever. query: The query. - run_id: The ID of the run. Defaults to `None`. - parent_run_id: The ID of the parent run. Defaults to `None`. + run_id: The ID of the run. + parent_run_id: The ID of the parent run. **kwargs: Additional keyword arguments. Returns: @@ -1562,13 +1579,10 @@ class CallbackManager(BaseCallbackManager): Args: name: The name of the adhoc event. data: The data for the adhoc event. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. Raises: ValueError: If additional keyword arguments are passed. - - !!! version-added "Added in version 0.2.14" - """ if not self.handlers: return @@ -1782,7 +1796,7 @@ class AsyncCallbackManager(BaseCallbackManager): Args: serialized: The serialized LLM. prompts: The list of prompts. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. **kwargs: Additional keyword arguments. Returns: @@ -1870,7 +1884,7 @@ class AsyncCallbackManager(BaseCallbackManager): Args: serialized: The serialized LLM. messages: The list of messages. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. **kwargs: Additional keyword arguments. Returns: @@ -1941,7 +1955,7 @@ class AsyncCallbackManager(BaseCallbackManager): Args: serialized: The serialized chain. inputs: The inputs to the chain. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. **kwargs: Additional keyword arguments. Returns: @@ -1988,8 +2002,8 @@ class AsyncCallbackManager(BaseCallbackManager): Args: serialized: The serialized tool. input_str: The input to the tool. - run_id: The ID of the run. Defaults to `None`. - parent_run_id: The ID of the parent run. Defaults to `None`. + run_id: The ID of the run. + parent_run_id: The ID of the parent run. **kwargs: Additional keyword arguments. Returns: @@ -2038,12 +2052,10 @@ class AsyncCallbackManager(BaseCallbackManager): Args: name: The name of the adhoc event. data: The data for the adhoc event. - run_id: The ID of the run. Defaults to `None`. + run_id: The ID of the run. Raises: ValueError: If additional keyword arguments are passed. - - !!! version-added "Added in version 0.2.14" """ if not self.handlers: return @@ -2082,8 +2094,8 @@ class AsyncCallbackManager(BaseCallbackManager): Args: serialized: The serialized retriever. query: The query. - run_id: The ID of the run. Defaults to `None`. - parent_run_id: The ID of the parent run. Defaults to `None`. + run_id: The ID of the run. + parent_run_id: The ID of the parent run. **kwargs: Additional keyword arguments. 
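As a hedged illustration of the `on_*_start` hooks whose arguments are documented above, a minimal handler sketch (class name and log messages are hypothetical):

```python
from typing import Any
from uuid import UUID

from langchain_core.callbacks import BaseCallbackHandler


class StartLogger(BaseCallbackHandler):
    """Hypothetical handler that logs LLM and tool run starts."""

    def on_llm_start(
        self, serialized: dict[str, Any], prompts: list[str], *, run_id: UUID, **kwargs: Any
    ) -> None:
        print(f"LLM run {run_id} started with {len(prompts)} prompt(s)")

    def on_tool_start(
        self, serialized: dict[str, Any], input_str: str, *, run_id: UUID, **kwargs: Any
    ) -> None:
        print(f"Tool run {run_id} started with input {input_str!r}")
```

Attach it to any invocation via `config={"callbacks": [StartLogger()]}`.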
Returns: @@ -2555,9 +2567,6 @@ async def adispatch_custom_event( This is due to a limitation in asyncio for python <= 3.10 that prevents LangChain from automatically propagating the config object on the user's behalf. - - !!! version-added "Added in version 0.2.15" - """ # Import locally to prevent circular imports. from langchain_core.runnables.config import ( # noqa: PLC0415 @@ -2630,9 +2639,6 @@ def dispatch_custom_event( foo_ = RunnableLambda(foo) foo_.invoke({"a": "1"}, {"callbacks": [CustomCallbackManager()]}) ``` - - !!! version-added "Added in version 0.2.15" - """ # Import locally to prevent circular imports. from langchain_core.runnables.config import ( # noqa: PLC0415 diff --git a/libs/core/langchain_core/callbacks/stdout.py b/libs/core/langchain_core/callbacks/stdout.py index c2a1cfe805b..95259cfb38a 100644 --- a/libs/core/langchain_core/callbacks/stdout.py +++ b/libs/core/langchain_core/callbacks/stdout.py @@ -20,7 +20,7 @@ class StdOutCallbackHandler(BaseCallbackHandler): """Initialize callback handler. Args: - color: The color to use for the text. Defaults to `None`. + color: The color to use for the text. """ self.color = color @@ -61,7 +61,7 @@ class StdOutCallbackHandler(BaseCallbackHandler): Args: action: The agent action. - color: The color to use for the text. Defaults to `None`. + color: The color to use for the text. **kwargs: Additional keyword arguments. """ print_text(action.log, color=color or self.color) @@ -79,9 +79,9 @@ class StdOutCallbackHandler(BaseCallbackHandler): Args: output: The output to print. - color: The color to use for the text. Defaults to `None`. - observation_prefix: The observation prefix. Defaults to `None`. - llm_prefix: The LLM prefix. Defaults to `None`. + color: The color to use for the text. + observation_prefix: The observation prefix. + llm_prefix: The LLM prefix. **kwargs: Additional keyword arguments. """ output = str(output) @@ -103,8 +103,8 @@ class StdOutCallbackHandler(BaseCallbackHandler): Args: text: The text to print. - color: The color to use for the text. Defaults to `None`. - end: The end character to use. Defaults to "". + color: The color to use for the text. + end: The end character to use. **kwargs: Additional keyword arguments. """ print_text(text, color=color or self.color, end=end) @@ -117,7 +117,7 @@ class StdOutCallbackHandler(BaseCallbackHandler): Args: finish: The agent finish. - color: The color to use for the text. Defaults to `None`. + color: The color to use for the text. **kwargs: Additional keyword arguments. """ print_text(finish.log, color=color or self.color, end="\n") diff --git a/libs/core/langchain_core/callbacks/usage.py b/libs/core/langchain_core/callbacks/usage.py index f5b20aef0c1..b183a51383e 100644 --- a/libs/core/langchain_core/callbacks/usage.py +++ b/libs/core/langchain_core/callbacks/usage.py @@ -24,7 +24,7 @@ class UsageMetadataCallbackHandler(BaseCallbackHandler): from langchain_core.callbacks import UsageMetadataCallbackHandler llm_1 = init_chat_model(model="openai:gpt-4o-mini") - llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-latest") + llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022") callback = UsageMetadataCallbackHandler() result_1 = llm_1.invoke("Hello", config={"callbacks": [callback]}) @@ -43,7 +43,7 @@ class UsageMetadataCallbackHandler(BaseCallbackHandler): 'input_token_details': {'cache_read': 0, 'cache_creation': 0}}} ``` - !!! version-added "Added in version 0.3.49" + !!! 
version-added "Added in `langchain-core` 0.3.49" """ @@ -109,7 +109,7 @@ def get_usage_metadata_callback( from langchain_core.callbacks import get_usage_metadata_callback llm_1 = init_chat_model(model="openai:gpt-4o-mini") - llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-latest") + llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022") with get_usage_metadata_callback() as cb: llm_1.invoke("Hello") @@ -134,7 +134,7 @@ def get_usage_metadata_callback( } ``` - !!! version-added "Added in version 0.3.49" + !!! version-added "Added in `langchain-core` 0.3.49" """ usage_metadata_callback_var: ContextVar[UsageMetadataCallbackHandler | None] = ( diff --git a/libs/core/langchain_core/chat_history.py b/libs/core/langchain_core/chat_history.py index a4f135b8dfc..7c315d44d46 100644 --- a/libs/core/langchain_core/chat_history.py +++ b/libs/core/langchain_core/chat_history.py @@ -121,7 +121,7 @@ class BaseChatMessageHistory(ABC): This method may be deprecated in a future release. Args: - message: The human message to add to the store. + message: The `HumanMessage` to add to the store. """ if isinstance(message, HumanMessage): self.add_message(message) @@ -129,7 +129,7 @@ class BaseChatMessageHistory(ABC): self.add_message(HumanMessage(content=message)) def add_ai_message(self, message: AIMessage | str) -> None: - """Convenience method for adding an AI message string to the store. + """Convenience method for adding an `AIMessage` string to the store. !!! note This is a convenience method. Code should favor the bulk `add_messages` @@ -138,7 +138,7 @@ class BaseChatMessageHistory(ABC): This method may be deprecated in a future release. Args: - message: The AI message to add. + message: The `AIMessage` to add. """ if isinstance(message, AIMessage): self.add_message(message) @@ -153,7 +153,7 @@ class BaseChatMessageHistory(ABC): Raises: NotImplementedError: If the sub-class has not implemented an efficient - add_messages method. + `add_messages` method. """ if type(self).add_messages != BaseChatMessageHistory.add_messages: # This means that the sub-class has implemented an efficient add_messages @@ -173,7 +173,7 @@ class BaseChatMessageHistory(ABC): in an efficient manner to avoid unnecessary round-trips to the underlying store. Args: - messages: A sequence of BaseMessage objects to store. + messages: A sequence of `BaseMessage` objects to store. """ for message in messages: self.add_message(message) @@ -182,7 +182,7 @@ class BaseChatMessageHistory(ABC): """Async add a list of messages. Args: - messages: A sequence of BaseMessage objects to store. + messages: A sequence of `BaseMessage` objects to store. """ await run_in_executor(None, self.add_messages, messages) diff --git a/libs/core/langchain_core/document_loaders/base.py b/libs/core/langchain_core/document_loaders/base.py index 6f46b99b71c..e74bc5976c0 100644 --- a/libs/core/langchain_core/document_loaders/base.py +++ b/libs/core/langchain_core/document_loaders/base.py @@ -27,7 +27,7 @@ class BaseLoader(ABC): # noqa: B024 """Interface for Document Loader. Implementations should implement the lazy-loading method using generators - to avoid loading all Documents into memory at once. + to avoid loading all documents into memory at once. `load` is provided just for user convenience and should not be overridden. """ @@ -35,38 +35,40 @@ class BaseLoader(ABC): # noqa: B024 # Sub-classes should not implement this method directly. Instead, they # should implement the lazy load method. 
def load(self) -> list[Document]: - """Load data into Document objects. + """Load data into `Document` objects. Returns: - the documents. + The documents. """ return list(self.lazy_load()) async def aload(self) -> list[Document]: - """Load data into Document objects. + """Load data into `Document` objects. Returns: - the documents. + The documents. """ return [document async for document in self.alazy_load()] def load_and_split( self, text_splitter: TextSplitter | None = None ) -> list[Document]: - """Load Documents and split into chunks. Chunks are returned as Documents. + """Load `Document` and split into chunks. Chunks are returned as `Document`. - Do not override this method. It should be considered to be deprecated! + !!! danger + + Do not override this method. It should be considered to be deprecated! Args: - text_splitter: TextSplitter instance to use for splitting documents. - Defaults to RecursiveCharacterTextSplitter. + text_splitter: `TextSplitter` instance to use for splitting documents. + Defaults to `RecursiveCharacterTextSplitter`. Raises: - ImportError: If langchain-text-splitters is not installed - and no text_splitter is provided. + ImportError: If `langchain-text-splitters` is not installed + and no `text_splitter` is provided. Returns: - List of Documents. + List of `Document`. """ if text_splitter is None: if not _HAS_TEXT_SPLITTERS: @@ -86,10 +88,10 @@ class BaseLoader(ABC): # noqa: B024 # Attention: This method will be upgraded into an abstractmethod once it's # implemented in all the existing subclasses. def lazy_load(self) -> Iterator[Document]: - """A lazy loader for Documents. + """A lazy loader for `Document`. Yields: - the documents. + The `Document` objects. """ if type(self).load != BaseLoader.load: return iter(self.load()) @@ -97,10 +99,10 @@ class BaseLoader(ABC): # noqa: B024 raise NotImplementedError(msg) async def alazy_load(self) -> AsyncIterator[Document]: - """A lazy loader for Documents. + """A lazy loader for `Document`. Yields: - the documents. + The `Document` objects. """ iterator = await run_in_executor(None, self.lazy_load) done = object() @@ -115,7 +117,7 @@ class BaseBlobParser(ABC): """Abstract interface for blob parsers. A blob parser provides a way to parse raw data stored in a blob into one - or more documents. + or more `Document` objects. The parser can be composed with blob loaders, making it easy to reuse a parser independent of how the blob was originally loaded. @@ -128,25 +130,25 @@ class BaseBlobParser(ABC): Subclasses are required to implement this method. Args: - blob: Blob instance + blob: `Blob` instance Returns: - Generator of documents + Generator of `Document` objects """ def parse(self, blob: Blob) -> list[Document]: - """Eagerly parse the blob into a document or documents. + """Eagerly parse the blob into a `Document` or list of `Document` objects. This is a convenience method for interactive development environment. - Production applications should favor the lazy_parse method instead. + Production applications should favor the `lazy_parse` method instead. Subclasses should generally not over-ride this parse method. 
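A hedged sketch of a lazily loading `BaseLoader` as described above; the directory layout and class name are assumptions.

```python
from collections.abc import Iterator
from pathlib import Path

from langchain_core.document_loaders import BaseLoader
from langchain_core.documents import Document


class TextDirectoryLoader(BaseLoader):
    """Hypothetical loader yielding one Document per .txt file, one at a time."""

    def __init__(self, directory: str) -> None:
        self.directory = Path(directory)

    def lazy_load(self) -> Iterator[Document]:
        for path in sorted(self.directory.glob("*.txt")):
            yield Document(
                page_content=path.read_text(encoding="utf-8"),
                metadata={"source": str(path)},
            )


# `load()` and `aload()` come for free and simply materialize the generator.
docs = TextDirectoryLoader("./notes").load()  # "./notes" is an assumed directory
```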
Args: - blob: Blob instance + blob: `Blob` instance Returns: - List of documents + List of `Document` objects """ return list(self.lazy_parse(blob)) diff --git a/libs/core/langchain_core/document_loaders/blob_loaders.py b/libs/core/langchain_core/document_loaders/blob_loaders.py index 6f26106ee30..8c6832177fd 100644 --- a/libs/core/langchain_core/document_loaders/blob_loaders.py +++ b/libs/core/langchain_core/document_loaders/blob_loaders.py @@ -28,7 +28,7 @@ class BlobLoader(ABC): def yield_blobs( self, ) -> Iterable[Blob]: - """A lazy loader for raw data represented by LangChain's Blob object. + """A lazy loader for raw data represented by LangChain's `Blob` object. Returns: A generator over blobs diff --git a/libs/core/langchain_core/document_loaders/langsmith.py b/libs/core/langchain_core/document_loaders/langsmith.py index ac69bef81e5..0ac054dc3b7 100644 --- a/libs/core/langchain_core/document_loaders/langsmith.py +++ b/libs/core/langchain_core/document_loaders/langsmith.py @@ -14,13 +14,13 @@ from langchain_core.documents import Document class LangSmithLoader(BaseLoader): - """Load LangSmith Dataset examples as Documents. + """Load LangSmith Dataset examples as `Document` objects. - Loads the example inputs as the Document page content and places the entire example - into the Document metadata. This allows you to easily create few-shot example - retrievers from the loaded documents. + Loads the example inputs as the `Document` page content and places the entire + example into the `Document` metadata. This allows you to easily create few-shot + example retrievers from the loaded documents. - ??? note "Lazy load" + ??? note "Lazy loading example" ```python from langchain_core.document_loaders import LangSmithLoader @@ -34,9 +34,6 @@ class LangSmithLoader(BaseLoader): ```python # -> [Document("...", metadata={"inputs": {...}, "outputs": {...}, ...}), ...] ``` - - !!! version-added "Added in version 0.2.34" - """ def __init__( @@ -60,26 +57,25 @@ class LangSmithLoader(BaseLoader): """Create a LangSmith loader. Args: - dataset_id: The ID of the dataset to filter by. Defaults to `None`. - dataset_name: The name of the dataset to filter by. Defaults to `None`. + dataset_id: The ID of the dataset to filter by. + dataset_name: The name of the dataset to filter by. content_key: The inputs key to set as Document page content. `'.'` characters are interpreted as nested keys. E.g. `content_key="first.second"` will result in `Document(page_content=format_content(example.inputs["first"]["second"]))` format_content: Function for converting the content extracted from the example inputs into a string. Defaults to JSON-encoding the contents. - example_ids: The IDs of the examples to filter by. Defaults to `None`. - as_of: The dataset version tag OR - timestamp to retrieve the examples as of. - Response examples will only be those that were present at the time - of the tagged (or timestamped) version. + example_ids: The IDs of the examples to filter by. + as_of: The dataset version tag or timestamp to retrieve the examples as of. + Response examples will only be those that were present at the time of + the tagged (or timestamped) version. splits: A list of dataset splits, which are - divisions of your dataset such as 'train', 'test', or 'validation'. + divisions of your dataset such as `train`, `test`, or `validation`. Returns examples only from the specified splits. - inline_s3_urls: Whether to inline S3 URLs. Defaults to `True`. - offset: The offset to start from. Defaults to 0. 
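A hedged sketch of a `BaseBlobParser` that implements only `lazy_parse`, as the interface above requires; the paragraph-splitting rule is an illustrative assumption.

```python
from collections.abc import Iterator

from langchain_core.document_loaders import BaseBlobParser
from langchain_core.documents import Blob, Document


class ParagraphParser(BaseBlobParser):
    """Hypothetical parser yielding one Document per blank-line-separated paragraph."""

    def lazy_parse(self, blob: Blob) -> Iterator[Document]:
        for i, paragraph in enumerate(blob.as_string().split("\n\n")):
            if paragraph.strip():
                yield Document(
                    page_content=paragraph,
                    metadata={"source": blob.source, "paragraph": i},
                )


# The inherited `parse` eagerly materializes the lazy generator.
docs = ParagraphParser().parse(Blob.from_data("first paragraph\n\nsecond paragraph"))
```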
+ inline_s3_urls: Whether to inline S3 URLs. + offset: The offset to start from. limit: The maximum number of examples to return. - metadata: Metadata to filter by. Defaults to `None`. + metadata: Metadata to filter by. filter: A structured filter string to apply to the examples. client: LangSmith Client. If not provided will be initialized from below args. client_kwargs: Keyword args to pass to LangSmith client init. Should only be diff --git a/libs/core/langchain_core/documents/__init__.py b/libs/core/langchain_core/documents/__init__.py index 2bf3f802197..1969f10935d 100644 --- a/libs/core/langchain_core/documents/__init__.py +++ b/libs/core/langchain_core/documents/__init__.py @@ -1,7 +1,28 @@ -"""Documents module. +"""Documents module for data retrieval and processing workflows. -**Document** module is a collection of classes that handle documents -and their transformations. +This module provides core abstractions for handling data in retrieval-augmented +generation (RAG) pipelines, vector stores, and document processing workflows. + +!!! warning "Documents vs. message content" + This module is distinct from `langchain_core.messages.content`, which provides + multimodal content blocks for **LLM chat I/O** (text, images, audio, etc. within + messages). + + **Key distinction:** + + - **Documents** (this module): For **data retrieval and processing workflows** + - Vector stores, retrievers, RAG pipelines + - Text chunking, embedding, and semantic search + - Example: Chunks of a PDF stored in a vector database + + - **Content Blocks** (`messages.content`): For **LLM conversational I/O** + - Multimodal message content sent to/from models + - Tool calls, reasoning, citations within chat + - Example: An image sent to a vision model in a chat message (via + [`ImageContentBlock`][langchain.messages.ImageContentBlock]) + + While both can represent similar data types (text, files), they serve different + architectural purposes in LangChain applications. """ from typing import TYPE_CHECKING diff --git a/libs/core/langchain_core/documents/base.py b/libs/core/langchain_core/documents/base.py index 8631392ac36..0341c8c184e 100644 --- a/libs/core/langchain_core/documents/base.py +++ b/libs/core/langchain_core/documents/base.py @@ -1,4 +1,16 @@ -"""Base classes for media and documents.""" +"""Base classes for media and documents. + +This module contains core abstractions for **data retrieval and processing workflows**: + +- `BaseMedia`: Base class providing `id` and `metadata` fields +- `Blob`: Raw data loading (files, binary data) - used by document loaders +- `Document`: Text content for retrieval (RAG, vector stores, semantic search) + +!!! note "Not for LLM chat messages" + These classes are for data processing pipelines, not LLM I/O. For multimodal + content in chat messages (images, audio in conversations), see + `langchain.messages` content blocks instead. +""" from __future__ import annotations @@ -19,27 +31,23 @@ PathLike = str | PurePath class BaseMedia(Serializable): - """Use to represent media content. + """Base class for content used in retrieval and data processing workflows. - Media objects can be used to represent raw data, such as text or binary data. + Provides common fields for content that needs to be stored, indexed, or searched. - LangChain Media objects allow associating metadata and an optional identifier - with the content. - - The presence of an ID and metadata make it easier to store, index, and search - over the content in a structured way. + !!! 
note + For multimodal content in **chat messages** (images, audio sent to/from LLMs), + use `langchain.messages` content blocks instead. """ # The ID field is optional at the moment. # It will likely become required in a future major release after - # it has been adopted by enough vectorstore implementations. + # it has been adopted by enough VectorStore implementations. id: str | None = Field(default=None, coerce_numbers_to_str=True) """An optional identifier for the document. Ideally this should be unique across the document collection and formatted as a UUID, but this will not be enforced. - - !!! version-added "Added in version 0.2.11" """ metadata: dict = Field(default_factory=dict) @@ -47,15 +55,14 @@ class BaseMedia(Serializable): class Blob(BaseMedia): - """Blob represents raw data by either reference or value. + """Raw data abstraction for document loading and file processing. - Provides an interface to materialize the blob in different representations, and - help to decouple the development of data loaders from the downstream parsing of - the raw data. + Represents raw bytes or text, either in-memory or by file reference. Used + primarily by document loaders to decouple data loading from parsing. - Inspired by: https://developer.mozilla.org/en-US/docs/Web/API/Blob + Inspired by [Mozilla's `Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) - Example: Initialize a blob from in-memory data + ???+ example "Initialize a blob from in-memory data" ```python from langchain_core.documents import Blob @@ -73,7 +80,7 @@ class Blob(BaseMedia): print(f.read()) ``` - Example: Load from memory and specify mime-type and metadata + ??? example "Load from memory and specify MIME type and metadata" ```python from langchain_core.documents import Blob @@ -85,7 +92,7 @@ class Blob(BaseMedia): ) ``` - Example: Load the blob from a file + ??? example "Load the blob from a file" ```python from langchain_core.documents import Blob @@ -105,13 +112,13 @@ class Blob(BaseMedia): """ data: bytes | str | None = None - """Raw data associated with the blob.""" + """Raw data associated with the `Blob`.""" mimetype: str | None = None - """MimeType not to be confused with a file extension.""" + """MIME type, not to be confused with a file extension.""" encoding: str = "utf-8" """Encoding to use if decoding the bytes into a string. - Use utf-8 as default encoding, if decoding to string. + Uses `utf-8` as default encoding if decoding to string. """ path: PathLike | None = None """Location where the original content was found.""" @@ -125,9 +132,9 @@ class Blob(BaseMedia): def source(self) -> str | None: """The source location of the blob as string if known otherwise none. - If a path is associated with the blob, it will default to the path location. + If a path is associated with the `Blob`, it will default to the path location. - Unless explicitly set via a metadata field called "source", in which + Unless explicitly set via a metadata field called `'source'`, in which case that value will be used instead. """ if self.metadata and "source" in self.metadata: @@ -211,15 +218,15 @@ class Blob(BaseMedia): """Load the blob from a path like object. 
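A small hedged example of the `source` resolution rule described above (a `'source'` metadata key takes precedence over `path`); the URI is made up.

```python
from langchain_core.documents import Blob

blob = Blob.from_data(
    "raw text payload",
    mime_type="text/plain",
    metadata={"source": "s3://my-bucket/notes.txt"},  # hypothetical URI
)
print(blob.source)       # "s3://my-bucket/notes.txt", since metadata wins over the missing path
print(blob.as_string())  # "raw text payload"
```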
Args: - path: path like object to file to be read + path: Path-like object to file to be read encoding: Encoding to use if decoding the bytes into a string - mime_type: if provided, will be set as the mime-type of the data - guess_type: If `True`, the mimetype will be guessed from the file extension, - if a mime-type was not provided - metadata: Metadata to associate with the blob + mime_type: If provided, will be set as the MIME type of the data + guess_type: If `True`, the MIME type will be guessed from the file + extension, if a MIME type was not provided + metadata: Metadata to associate with the `Blob` Returns: - Blob instance + `Blob` instance """ if mime_type is None and guess_type: mimetype = mimetypes.guess_type(path)[0] if guess_type else None @@ -245,17 +252,17 @@ class Blob(BaseMedia): path: str | None = None, metadata: dict | None = None, ) -> Blob: - """Initialize the blob from in-memory data. + """Initialize the `Blob` from in-memory data. Args: - data: the in-memory data associated with the blob + data: The in-memory data associated with the `Blob` encoding: Encoding to use if decoding the bytes into a string - mime_type: if provided, will be set as the mime-type of the data - path: if provided, will be set as the source from which the data came - metadata: Metadata to associate with the blob + mime_type: If provided, will be set as the MIME type of the data + path: If provided, will be set as the source from which the data came + metadata: Metadata to associate with the `Blob` Returns: - Blob instance + `Blob` instance """ return cls( data=data, @@ -276,6 +283,10 @@ class Blob(BaseMedia): class Document(BaseMedia): """Class for storing a piece of text and associated metadata. + !!! note + `Document` is for **retrieval workflows**, not chat I/O. For sending text + to an LLM in a conversation, use message types from `langchain.messages`. + Example: ```python from langchain_core.documents import Document @@ -298,12 +309,12 @@ class Document(BaseMedia): @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: ["langchain", "schema", "document"] @@ -311,10 +322,10 @@ class Document(BaseMedia): return ["langchain", "schema", "document"] def __str__(self) -> str: - """Override __str__ to restrict it to page_content and metadata. + """Override `__str__` to restrict it to page_content and metadata. Returns: - A string representation of the Document. + A string representation of the `Document`. """ # The format matches pydantic format for __str__. # diff --git a/libs/core/langchain_core/documents/compressor.py b/libs/core/langchain_core/documents/compressor.py index b18728eb9d3..c765b378bb1 100644 --- a/libs/core/langchain_core/documents/compressor.py +++ b/libs/core/langchain_core/documents/compressor.py @@ -21,14 +21,14 @@ class BaseDocumentCompressor(BaseModel, ABC): This abstraction is primarily used for post-processing of retrieved documents. - Documents matching a given query are first retrieved. + `Document` objects matching a given query are first retrieved. Then the list of documents can be further processed. For example, one could re-rank the retrieved documents using an LLM. !!! 
note - Users should favor using a RunnableLambda instead of sub-classing from this + Users should favor using a `RunnableLambda` instead of sub-classing from this interface. """ @@ -43,9 +43,9 @@ class BaseDocumentCompressor(BaseModel, ABC): """Compress retrieved documents given the query context. Args: - documents: The retrieved documents. + documents: The retrieved `Document` objects. query: The query context. - callbacks: Optional callbacks to run during compression. + callbacks: Optional `Callbacks` to run during compression. Returns: The compressed documents. @@ -61,9 +61,9 @@ class BaseDocumentCompressor(BaseModel, ABC): """Async compress retrieved documents given the query context. Args: - documents: The retrieved documents. + documents: The retrieved `Document` objects. query: The query context. - callbacks: Optional callbacks to run during compression. + callbacks: Optional `Callbacks` to run during compression. Returns: The compressed documents. diff --git a/libs/core/langchain_core/documents/transformers.py b/libs/core/langchain_core/documents/transformers.py index 4cb37470e6b..c05fa29a239 100644 --- a/libs/core/langchain_core/documents/transformers.py +++ b/libs/core/langchain_core/documents/transformers.py @@ -16,8 +16,8 @@ if TYPE_CHECKING: class BaseDocumentTransformer(ABC): """Abstract base class for document transformation. - A document transformation takes a sequence of Documents and returns a - sequence of transformed Documents. + A document transformation takes a sequence of `Document` objects and returns a + sequence of transformed `Document` objects. Example: ```python @@ -57,10 +57,10 @@ class BaseDocumentTransformer(ABC): """Transform a list of documents. Args: - documents: A sequence of Documents to be transformed. + documents: A sequence of `Document` objects to be transformed. Returns: - A sequence of transformed Documents. + A sequence of transformed `Document` objects. """ async def atransform_documents( @@ -69,10 +69,10 @@ class BaseDocumentTransformer(ABC): """Asynchronously transform a list of documents. Args: - documents: A sequence of Documents to be transformed. + documents: A sequence of `Document` objects to be transformed. Returns: - A sequence of transformed Documents. + A sequence of transformed `Document` objects. """ return await run_in_executor( None, self.transform_documents, documents, **kwargs diff --git a/libs/core/langchain_core/embeddings/fake.py b/libs/core/langchain_core/embeddings/fake.py index 885366d2e62..0a252efc194 100644 --- a/libs/core/langchain_core/embeddings/fake.py +++ b/libs/core/langchain_core/embeddings/fake.py @@ -18,7 +18,8 @@ class FakeEmbeddings(Embeddings, BaseModel): This embedding model creates embeddings by sampling from a normal distribution. - Do not use this outside of testing, as it is not a real embedding model. + !!! danger "Toy model" + Do not use this outside of testing, as it is not a real embedding model. Instantiate: ```python @@ -72,7 +73,8 @@ class DeterministicFakeEmbedding(Embeddings, BaseModel): This embedding model creates embeddings by sampling from a normal distribution with a seed based on the hash of the text. - Do not use this outside of testing, as it is not a real embedding model. + !!! danger "Toy model" + Do not use this outside of testing, as it is not a real embedding model. 
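A hedged sketch of the `BaseDocumentTransformer` interface above: a transformer that drops documents below a minimum length (the threshold and class name are assumptions). The async variant is inherited and delegates to `transform_documents`.

```python
from collections.abc import Sequence
from typing import Any

from langchain_core.documents import Document
from langchain_core.documents.transformers import BaseDocumentTransformer


class DropShortDocuments(BaseDocumentTransformer):
    """Hypothetical transformer filtering out documents shorter than `min_chars`."""

    def __init__(self, min_chars: int = 20) -> None:
        self.min_chars = min_chars

    def transform_documents(
        self, documents: Sequence[Document], **kwargs: Any
    ) -> Sequence[Document]:
        return [doc for doc in documents if len(doc.page_content) >= self.min_chars]


docs = [Document(page_content="too short"), Document(page_content="x" * 40)]
print(len(DropShortDocuments().transform_documents(docs)))  # 1
```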
Instantiate: ```python diff --git a/libs/core/langchain_core/example_selectors/length_based.py b/libs/core/langchain_core/example_selectors/length_based.py index 296db6c1c60..9424e645bf5 100644 --- a/libs/core/langchain_core/example_selectors/length_based.py +++ b/libs/core/langchain_core/example_selectors/length_based.py @@ -29,7 +29,7 @@ class LengthBasedExampleSelector(BaseExampleSelector, BaseModel): max_length: int = 2048 """Max length for the prompt, beyond which examples are cut.""" - example_text_lengths: list[int] = Field(default_factory=list) # :meta private: + example_text_lengths: list[int] = Field(default_factory=list) """Length of each example.""" def add_example(self, example: dict[str, str]) -> None: diff --git a/libs/core/langchain_core/example_selectors/semantic_similarity.py b/libs/core/langchain_core/example_selectors/semantic_similarity.py index 57c3ece590a..1e7491a2eb7 100644 --- a/libs/core/langchain_core/example_selectors/semantic_similarity.py +++ b/libs/core/langchain_core/example_selectors/semantic_similarity.py @@ -41,7 +41,7 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC): """Optional keys to filter input to. If provided, the search is based on the input variables instead of all variables.""" vectorstore_kwargs: dict[str, Any] | None = None - """Extra arguments passed to similarity_search function of the vectorstore.""" + """Extra arguments passed to similarity_search function of the `VectorStore`.""" model_config = ConfigDict( arbitrary_types_allowed=True, @@ -154,12 +154,12 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector): examples: List of examples to use in the prompt. embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings(). vectorstore_cls: A vector store DB interface class, e.g. FAISS. - k: Number of examples to select. Default is 4. + k: Number of examples to select. input_keys: If provided, the search is based on the input variables instead of all variables. example_keys: If provided, keys to filter examples to. vectorstore_kwargs: Extra arguments passed to similarity_search function - of the vectorstore. + of the `VectorStore`. vectorstore_cls_kwargs: optional kwargs containing url for vector store Returns: @@ -198,12 +198,12 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector): examples: List of examples to use in the prompt. embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings(). vectorstore_cls: A vector store DB interface class, e.g. FAISS. - k: Number of examples to select. Default is 4. + k: Number of examples to select. input_keys: If provided, the search is based on the input variables instead of all variables. example_keys: If provided, keys to filter examples to. vectorstore_kwargs: Extra arguments passed to similarity_search function - of the vectorstore. + of the `VectorStore`. vectorstore_cls_kwargs: optional kwargs containing url for vector store Returns: @@ -285,14 +285,13 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector): examples: List of examples to use in the prompt. embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings(). vectorstore_cls: A vector store DB interface class, e.g. FAISS. - k: Number of examples to select. Default is 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Default is 20. + k: Number of examples to select. + fetch_k: Number of `Document` objects to fetch to pass to MMR algorithm. 
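A hedged end-to-end sketch of `from_examples` with the arguments documented above, using the in-core fake embeddings and in-memory vector store as stand-ins for real components.

```python
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_core.example_selectors import SemanticSimilarityExampleSelector
from langchain_core.vectorstores import InMemoryVectorStore

examples = [
    {"input": "happy", "output": "sad"},
    {"input": "tall", "output": "short"},
    {"input": "sunny", "output": "rainy"},
]

selector = SemanticSimilarityExampleSelector.from_examples(
    examples,
    DeterministicFakeEmbedding(size=256),  # stand-in embedding model
    InMemoryVectorStore,                   # any VectorStore class can be used here
    k=2,
    input_keys=["input"],                  # search on the "input" variable only
)
print(selector.select_examples({"input": "cheerful"}))
```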
input_keys: If provided, the search is based on the input variables instead of all variables. example_keys: If provided, keys to filter examples to. vectorstore_kwargs: Extra arguments passed to similarity_search function - of the vectorstore. + of the `VectorStore`. vectorstore_cls_kwargs: optional kwargs containing url for vector store Returns: @@ -333,14 +332,13 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector): examples: List of examples to use in the prompt. embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings(). vectorstore_cls: A vector store DB interface class, e.g. FAISS. - k: Number of examples to select. Default is 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Default is 20. + k: Number of examples to select. + fetch_k: Number of `Document` objects to fetch to pass to MMR algorithm. input_keys: If provided, the search is based on the input variables instead of all variables. example_keys: If provided, keys to filter examples to. vectorstore_kwargs: Extra arguments passed to similarity_search function - of the vectorstore. + of the `VectorStore`. vectorstore_cls_kwargs: optional kwargs containing url for vector store Returns: diff --git a/libs/core/langchain_core/exceptions.py b/libs/core/langchain_core/exceptions.py index 375d2141f83..a1e8a5cb91a 100644 --- a/libs/core/langchain_core/exceptions.py +++ b/libs/core/langchain_core/exceptions.py @@ -16,9 +16,10 @@ class OutputParserException(ValueError, LangChainException): # noqa: N818 """Exception that output parsers should raise to signify a parsing error. This exists to differentiate parsing errors from other code or execution errors - that also may arise inside the output parser. OutputParserExceptions will be - available to catch and handle in ways to fix the parsing error, while other - errors will be raised. + that also may arise inside the output parser. + + `OutputParserException` will be available to catch and handle in ways to fix the + parsing error, while other errors will be raised. """ def __init__( @@ -28,23 +29,23 @@ class OutputParserException(ValueError, LangChainException): # noqa: N818 llm_output: str | None = None, send_to_llm: bool = False, # noqa: FBT001,FBT002 ): - """Create an OutputParserException. + """Create an `OutputParserException`. Args: error: The error that's being re-raised or an error message. - observation: String explanation of error which can be passed to a - model to try and remediate the issue. Defaults to `None`. + observation: String explanation of error which can be passed to a model to + try and remediate the issue. llm_output: String model output which is error-ing. - Defaults to `None`. + send_to_llm: Whether to send the observation and llm_output back to an Agent - after an OutputParserException has been raised. + after an `OutputParserException` has been raised. + This gives the underlying model driving the agent the context that the previous output was improperly structured, in the hopes that it will update the output to the correct format. - Defaults to `False`. Raises: - ValueError: If `send_to_llm` is True but either observation or + ValueError: If `send_to_llm` is `True` but either observation or `llm_output` are not provided. 
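A hedged sketch of raising `OutputParserException` with the arguments described above; the parser and its messages are hypothetical.

```python
from langchain_core.exceptions import OutputParserException


def parse_rating(model_output: str) -> int:
    """Hypothetical parser expecting a bare integer rating."""
    try:
        return int(model_output.strip())
    except ValueError as exc:
        raise OutputParserException(
            error=f"Could not parse a rating from {model_output!r}",
            observation="Reply with a single integer between 1 and 5.",
            llm_output=model_output,
            send_to_llm=True,  # requires both `observation` and `llm_output`
        ) from exc
```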
""" if isinstance(error, str): @@ -67,11 +68,11 @@ class ErrorCode(Enum): """Error codes.""" INVALID_PROMPT_INPUT = "INVALID_PROMPT_INPUT" - INVALID_TOOL_RESULTS = "INVALID_TOOL_RESULTS" + INVALID_TOOL_RESULTS = "INVALID_TOOL_RESULTS" # Used in JS; not Py (yet) MESSAGE_COERCION_FAILURE = "MESSAGE_COERCION_FAILURE" - MODEL_AUTHENTICATION = "MODEL_AUTHENTICATION" - MODEL_NOT_FOUND = "MODEL_NOT_FOUND" - MODEL_RATE_LIMIT = "MODEL_RATE_LIMIT" + MODEL_AUTHENTICATION = "MODEL_AUTHENTICATION" # Used in JS; not Py (yet) + MODEL_NOT_FOUND = "MODEL_NOT_FOUND" # Used in JS; not Py (yet) + MODEL_RATE_LIMIT = "MODEL_RATE_LIMIT" # Used in JS; not Py (yet) OUTPUT_PARSING_FAILURE = "OUTPUT_PARSING_FAILURE" @@ -87,6 +88,6 @@ def create_message(*, message: str, error_code: ErrorCode) -> str: """ return ( f"{message}\n" - "For troubleshooting, visit: https://python.langchain.com/docs/" - f"troubleshooting/errors/{error_code.value} " + "For troubleshooting, visit: https://docs.langchain.com/oss/python/langchain" + f"/errors/{error_code.value} " ) diff --git a/libs/core/langchain_core/indexing/__init__.py b/libs/core/langchain_core/indexing/__init__.py index aea7b9fadf5..ceb25d2d074 100644 --- a/libs/core/langchain_core/indexing/__init__.py +++ b/libs/core/langchain_core/indexing/__init__.py @@ -1,7 +1,7 @@ """Code to help indexing data into a vectorstore. This package contains helper logic to help deal with indexing data into -a vectorstore while avoiding duplicated content and over-writing content +a `VectorStore` while avoiding duplicated content and over-writing content if it's unchanged. """ diff --git a/libs/core/langchain_core/indexing/api.py b/libs/core/langchain_core/indexing/api.py index 4ef3776d123..de42a4f4f8e 100644 --- a/libs/core/langchain_core/indexing/api.py +++ b/libs/core/langchain_core/indexing/api.py @@ -298,61 +298,58 @@ def index( For the time being, documents are indexed using their hashes, and users are not able to specify the uid of the document. - !!! warning "Behavior changed in 0.3.25" + !!! warning "Behavior changed in `langchain-core` 0.3.25" Added `scoped_full` cleanup mode. !!! warning * In full mode, the loader should be returning - the entire dataset, and not just a subset of the dataset. - Otherwise, the auto_cleanup will remove documents that it is not - supposed to. + the entire dataset, and not just a subset of the dataset. + Otherwise, the auto_cleanup will remove documents that it is not + supposed to. * In incremental mode, if documents associated with a particular - source id appear across different batches, the indexing API - will do some redundant work. This will still result in the - correct end state of the index, but will unfortunately not be - 100% efficient. For example, if a given document is split into 15 - chunks, and we index them using a batch size of 5, we'll have 3 batches - all with the same source id. In general, to avoid doing too much - redundant work select as big a batch size as possible. + source id appear across different batches, the indexing API + will do some redundant work. This will still result in the + correct end state of the index, but will unfortunately not be + 100% efficient. For example, if a given document is split into 15 + chunks, and we index them using a batch size of 5, we'll have 3 batches + all with the same source id. In general, to avoid doing too much + redundant work select as big a batch size as possible. 
* The `scoped_full` mode is suitable if determining an appropriate batch size - is challenging or if your data loader cannot return the entire dataset at - once. This mode keeps track of source IDs in memory, which should be fine - for most use cases. If your dataset is large (10M+ docs), you will likely - need to parallelize the indexing process regardless. + is challenging or if your data loader cannot return the entire dataset at + once. This mode keeps track of source IDs in memory, which should be fine + for most use cases. If your dataset is large (10M+ docs), you will likely + need to parallelize the indexing process regardless. Args: docs_source: Data loader or iterable of documents to index. record_manager: Timestamped set to keep track of which documents were updated. - vector_store: VectorStore or DocumentIndex to index the documents into. - batch_size: Batch size to use when indexing. Default is 100. - cleanup: How to handle clean up of documents. Default is None. + vector_store: `VectorStore` or DocumentIndex to index the documents into. + batch_size: Batch size to use when indexing. + cleanup: How to handle clean up of documents. - incremental: Cleans up all documents that haven't been updated AND - that are associated with source ids that were seen during indexing. - Clean up is done continuously during indexing helping to minimize the - probability of users seeing duplicated content. + that are associated with source IDs that were seen during indexing. + Clean up is done continuously during indexing helping to minimize the + probability of users seeing duplicated content. - full: Delete all documents that have not been returned by the loader - during this run of indexing. - Clean up runs after all documents have been indexed. - This means that users may see duplicated content during indexing. + during this run of indexing. + Clean up runs after all documents have been indexed. + This means that users may see duplicated content during indexing. - scoped_full: Similar to Full, but only deletes all documents - that haven't been updated AND that are associated with - source ids that were seen during indexing. + that haven't been updated AND that are associated with + source IDs that were seen during indexing. - None: Do not delete any documents. source_id_key: Optional key that helps identify the original source - of the document. Default is None. + of the document. cleanup_batch_size: Batch size to use when cleaning up documents. - Default is 1_000. force_update: Force update documents even if they are present in the record manager. Useful if you are re-indexing with updated embeddings. - Default is False. key_encoder: Hashing algorithm to use for hashing the document content and - metadata. Default is "sha1". - Other options include "blake2b", "sha256", and "sha512". + metadata. Options include "blake2b", "sha256", and "sha512". - !!! version-added "Added in version 0.3.66" + !!! version-added "Added in `langchain-core` 0.3.66" key_encoder: Hashing algorithm to use for hashing the document. If not provided, a default encoder using SHA-1 will be used. @@ -366,10 +363,10 @@ def index( When changing the key encoder, you must change the index as well to avoid duplicated documents in the cache. upsert_kwargs: Additional keyword arguments to pass to the add_documents - method of the VectorStore or the upsert method of the DocumentIndex. + method of the `VectorStore` or the upsert method of the DocumentIndex. 
For example, you can use this to specify a custom vector_field: upsert_kwargs={"vector_field": "embedding"} - !!! version-added "Added in version 0.3.10" + !!! version-added "Added in `langchain-core` 0.3.10" Returns: Indexing result which contains information about how many documents @@ -378,10 +375,10 @@ def index( Raises: ValueError: If cleanup mode is not one of 'incremental', 'full' or None ValueError: If cleanup mode is incremental and source_id_key is None. - ValueError: If vectorstore does not have + ValueError: If `VectorStore` does not have "delete" and "add_documents" required methods. ValueError: If source_id_key is not None, but is not a string or callable. - TypeError: If `vectorstore` is not a VectorStore or a DocumentIndex. + TypeError: If `vectorstore` is not a `VectorStore` or a DocumentIndex. AssertionError: If `source_id` is None when cleanup mode is incremental. (should be unreachable code). """ @@ -418,7 +415,7 @@ def index( raise ValueError(msg) if type(destination).delete == VectorStore.delete: - # Checking if the vectorstore has overridden the default delete method + # Checking if the VectorStore has overridden the default delete method # implementation which just raises a NotImplementedError msg = "Vectorstore has not implemented the delete method" raise ValueError(msg) @@ -469,11 +466,11 @@ def index( ] if cleanup in {"incremental", "scoped_full"}: - # source ids are required. + # Source IDs are required. for source_id, hashed_doc in zip(source_ids, hashed_docs, strict=False): if source_id is None: msg = ( - f"Source ids are required when cleanup mode is " + f"Source IDs are required when cleanup mode is " f"incremental or scoped_full. " f"Document that starts with " f"content: {hashed_doc.page_content[:100]} " @@ -482,7 +479,7 @@ def index( raise ValueError(msg) if cleanup == "scoped_full": scoped_full_cleanup_source_ids.add(source_id) - # source ids cannot be None after for loop above. + # Source IDs cannot be None after for loop above. source_ids = cast("Sequence[str]", source_ids) exists_batch = record_manager.exists( @@ -541,7 +538,7 @@ def index( # If source IDs are provided, we can do the deletion incrementally! if cleanup == "incremental": # Get the uids of the documents that were not returned by the loader. - # mypy isn't good enough to determine that source ids cannot be None + # mypy isn't good enough to determine that source IDs cannot be None # here due to a check that's happening above, so we check again. for source_id in source_ids: if source_id is None: @@ -639,61 +636,58 @@ async def aindex( For the time being, documents are indexed using their hashes, and users are not able to specify the uid of the document. - !!! warning "Behavior changed in 0.3.25" + !!! warning "Behavior changed in `langchain-core` 0.3.25" Added `scoped_full` cleanup mode. !!! warning * In full mode, the loader should be returning - the entire dataset, and not just a subset of the dataset. - Otherwise, the auto_cleanup will remove documents that it is not - supposed to. + the entire dataset, and not just a subset of the dataset. + Otherwise, the auto_cleanup will remove documents that it is not + supposed to. * In incremental mode, if documents associated with a particular - source id appear across different batches, the indexing API - will do some redundant work. This will still result in the - correct end state of the index, but will unfortunately not be - 100% efficient. 
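A hedged sketch of calling `index` with the `incremental` cleanup mode and `source_id_key` described above, using in-memory components; the namespace, documents, and embedding size are illustrative.

```python
from langchain_core.documents import Document
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_core.indexing import InMemoryRecordManager, index
from langchain_core.vectorstores import InMemoryVectorStore

record_manager = InMemoryRecordManager(namespace="demo/docs")
record_manager.create_schema()
vector_store = InMemoryVectorStore(DeterministicFakeEmbedding(size=256))

docs = [
    Document(page_content="chunk one of a.txt", metadata={"source": "a.txt"}),
    Document(page_content="chunk two of a.txt", metadata={"source": "a.txt"}),
]

result = index(
    docs,
    record_manager,
    vector_store,
    cleanup="incremental",   # incremental/scoped_full cleanup requires source IDs
    source_id_key="source",  # where to find the source ID on each document
)
print(result)  # e.g. {'num_added': 2, 'num_updated': 0, 'num_skipped': 0, 'num_deleted': 0}
```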
For example, if a given document is split into 15 - chunks, and we index them using a batch size of 5, we'll have 3 batches - all with the same source id. In general, to avoid doing too much - redundant work select as big a batch size as possible. + source id appear across different batches, the indexing API + will do some redundant work. This will still result in the + correct end state of the index, but will unfortunately not be + 100% efficient. For example, if a given document is split into 15 + chunks, and we index them using a batch size of 5, we'll have 3 batches + all with the same source id. In general, to avoid doing too much + redundant work select as big a batch size as possible. * The `scoped_full` mode is suitable if determining an appropriate batch size - is challenging or if your data loader cannot return the entire dataset at - once. This mode keeps track of source IDs in memory, which should be fine - for most use cases. If your dataset is large (10M+ docs), you will likely - need to parallelize the indexing process regardless. + is challenging or if your data loader cannot return the entire dataset at + once. This mode keeps track of source IDs in memory, which should be fine + for most use cases. If your dataset is large (10M+ docs), you will likely + need to parallelize the indexing process regardless. Args: docs_source: Data loader or iterable of documents to index. record_manager: Timestamped set to keep track of which documents were updated. - vector_store: VectorStore or DocumentIndex to index the documents into. - batch_size: Batch size to use when indexing. Default is 100. - cleanup: How to handle clean up of documents. Default is None. + vector_store: `VectorStore` or DocumentIndex to index the documents into. + batch_size: Batch size to use when indexing. + cleanup: How to handle clean up of documents. - incremental: Cleans up all documents that haven't been updated AND - that are associated with source ids that were seen during indexing. - Clean up is done continuously during indexing helping to minimize the - probability of users seeing duplicated content. + that are associated with source IDs that were seen during indexing. + Clean up is done continuously during indexing helping to minimize the + probability of users seeing duplicated content. - full: Delete all documents that have not been returned by the loader - during this run of indexing. - Clean up runs after all documents have been indexed. - This means that users may see duplicated content during indexing. + during this run of indexing. + Clean up runs after all documents have been indexed. + This means that users may see duplicated content during indexing. - scoped_full: Similar to Full, but only deletes all documents - that haven't been updated AND that are associated with - source ids that were seen during indexing. + that haven't been updated AND that are associated with + source IDs that were seen during indexing. - None: Do not delete any documents. source_id_key: Optional key that helps identify the original source - of the document. Default is None. + of the document. cleanup_batch_size: Batch size to use when cleaning up documents. - Default is 1_000. force_update: Force update documents even if they are present in the record manager. Useful if you are re-indexing with updated embeddings. - Default is False. key_encoder: Hashing algorithm to use for hashing the document content and - metadata. Default is "sha1". - Other options include "blake2b", "sha256", and "sha512". + metadata. 
Options include "blake2b", "sha256", and "sha512". - !!! version-added "Added in version 0.3.66" + !!! version-added "Added in `langchain-core` 0.3.66" key_encoder: Hashing algorithm to use for hashing the document. If not provided, a default encoder using SHA-1 will be used. @@ -707,10 +701,10 @@ async def aindex( When changing the key encoder, you must change the index as well to avoid duplicated documents in the cache. upsert_kwargs: Additional keyword arguments to pass to the add_documents - method of the VectorStore or the upsert method of the DocumentIndex. + method of the `VectorStore` or the upsert method of the DocumentIndex. For example, you can use this to specify a custom vector_field: upsert_kwargs={"vector_field": "embedding"} - !!! version-added "Added in version 0.3.10" + !!! version-added "Added in `langchain-core` 0.3.10" Returns: Indexing result which contains information about how many documents @@ -719,10 +713,10 @@ async def aindex( Raises: ValueError: If cleanup mode is not one of 'incremental', 'full' or None ValueError: If cleanup mode is incremental and source_id_key is None. - ValueError: If vectorstore does not have + ValueError: If `VectorStore` does not have "adelete" and "aadd_documents" required methods. ValueError: If source_id_key is not None, but is not a string or callable. - TypeError: If `vector_store` is not a VectorStore or DocumentIndex. + TypeError: If `vector_store` is not a `VectorStore` or DocumentIndex. AssertionError: If `source_id_key` is None when cleanup mode is incremental or `scoped_full` (should be unreachable). """ @@ -763,7 +757,7 @@ async def aindex( type(destination).adelete == VectorStore.adelete and type(destination).delete == VectorStore.delete ): - # Checking if the vectorstore has overridden the default adelete or delete + # Checking if the VectorStore has overridden the default adelete or delete # methods implementation which just raises a NotImplementedError msg = "Vectorstore has not implemented the adelete or delete method" raise ValueError(msg) @@ -821,11 +815,11 @@ async def aindex( ] if cleanup in {"incremental", "scoped_full"}: - # If the cleanup mode is incremental, source ids are required. + # If the cleanup mode is incremental, source IDs are required. for source_id, hashed_doc in zip(source_ids, hashed_docs, strict=False): if source_id is None: msg = ( - f"Source ids are required when cleanup mode is " + f"Source IDs are required when cleanup mode is " f"incremental or scoped_full. " f"Document that starts with " f"content: {hashed_doc.page_content[:100]} " @@ -834,7 +828,7 @@ async def aindex( raise ValueError(msg) if cleanup == "scoped_full": scoped_full_cleanup_source_ids.add(source_id) - # source ids cannot be None after for loop above. + # Source IDs cannot be None after for loop above. source_ids = cast("Sequence[str]", source_ids) exists_batch = await record_manager.aexists( @@ -894,7 +888,7 @@ async def aindex( if cleanup == "incremental": # Get the uids of the documents that were not returned by the loader. - # mypy isn't good enough to determine that source ids cannot be None + # mypy isn't good enough to determine that source IDs cannot be None # here due to a check that's happening above, so we check again. 
for source_id in source_ids: if source_id is None: diff --git a/libs/core/langchain_core/indexing/base.py b/libs/core/langchain_core/indexing/base.py index a6f85dd7494..d8a891ddf9e 100644 --- a/libs/core/langchain_core/indexing/base.py +++ b/libs/core/langchain_core/indexing/base.py @@ -25,7 +25,7 @@ class RecordManager(ABC): The record manager abstraction is used by the langchain indexing API. The record manager keeps track of which documents have been - written into a vectorstore and when they were written. + written into a `VectorStore` and when they were written. The indexing API computes hashes for each document and stores the hash together with the write time and the source id in the record manager. @@ -37,7 +37,7 @@ class RecordManager(ABC): already been indexed, and to only index new documents. The main benefit of this abstraction is that it works across many vectorstores. - To be supported, a vectorstore needs to only support the ability to add and + To be supported, a `VectorStore` needs to only support the ability to add and delete documents by ID. Using the record manager, the indexing API will be able to delete outdated documents and avoid redundant indexing of documents that have already been indexed. @@ -45,13 +45,13 @@ class RecordManager(ABC): The main constraints of this abstraction are: 1. It relies on the time-stamps to determine which documents have been - indexed and which have not. This means that the time-stamps must be - monotonically increasing. The timestamp should be the timestamp - as measured by the server to minimize issues. + indexed and which have not. This means that the time-stamps must be + monotonically increasing. The timestamp should be the timestamp + as measured by the server to minimize issues. 2. The record manager is currently implemented separately from the - vectorstore, which means that the overall system becomes distributed - and may create issues with consistency. For example, writing to - record manager succeeds, but corresponding writing to vectorstore fails. + vectorstore, which means that the overall system becomes distributed + and may create issues with consistency. For example, writing to + record manager succeeds, but corresponding writing to `VectorStore` fails. """ def __init__( @@ -278,10 +278,10 @@ class InMemoryRecordManager(RecordManager): Args: keys: A list of record keys to upsert. group_ids: A list of group IDs corresponding to the keys. - Defaults to `None`. + time_at_least: Optional timestamp. Implementation can use this to optionally verify that the timestamp IS at least this time - in the system that stores. Defaults to `None`. + in the system that stores. E.g., use to validate that the time in the postgres database is equal to or larger than the given timestamp, if not raise an error. @@ -315,10 +315,10 @@ class InMemoryRecordManager(RecordManager): Args: keys: A list of record keys to upsert. group_ids: A list of group IDs corresponding to the keys. - Defaults to `None`. + time_at_least: Optional timestamp. Implementation can use this to optionally verify that the timestamp IS at least this time - in the system that stores. Defaults to `None`. + in the system that stores. E.g., use to validate that the time in the postgres database is equal to or larger than the given timestamp, if not raise an error. @@ -361,13 +361,13 @@ class InMemoryRecordManager(RecordManager): Args: before: Filter to list records updated before this time. - Defaults to `None`. + after: Filter to list records updated after this time. 
- Defaults to `None`. + group_ids: Filter to list records with specific group IDs. - Defaults to `None`. + limit: optional limit on the number of records to return. - Defaults to `None`. + Returns: A list of keys for the matching records. @@ -397,13 +397,13 @@ class InMemoryRecordManager(RecordManager): Args: before: Filter to list records updated before this time. - Defaults to `None`. + after: Filter to list records updated after this time. - Defaults to `None`. + group_ids: Filter to list records with specific group IDs. - Defaults to `None`. + limit: optional limit on the number of records to return. - Defaults to `None`. + Returns: A list of keys for the matching records. @@ -460,7 +460,7 @@ class UpsertResponse(TypedDict): class DeleteResponse(TypedDict, total=False): """A generic response for delete operation. - The fields in this response are optional and whether the vectorstore + The fields in this response are optional and whether the `VectorStore` returns them or not is up to the implementation. """ @@ -508,8 +508,6 @@ class DocumentIndex(BaseRetriever): 1. Storing document in the index. 2. Fetching document by ID. 3. Searching for document using a query. - - !!! version-added "Added in version 0.2.29" """ @abc.abstractmethod @@ -520,40 +518,40 @@ class DocumentIndex(BaseRetriever): if it is provided. If the ID is not provided, the upsert method is free to generate an ID for the content. - When an ID is specified and the content already exists in the vectorstore, + When an ID is specified and the content already exists in the `VectorStore`, the upsert method should update the content with the new data. If the content - does not exist, the upsert method should add the item to the vectorstore. + does not exist, the upsert method should add the item to the `VectorStore`. Args: - items: Sequence of documents to add to the vectorstore. + items: Sequence of documents to add to the `VectorStore`. **kwargs: Additional keyword arguments. Returns: A response object that contains the list of IDs that were - successfully added or updated in the vectorstore and the list of IDs that + successfully added or updated in the `VectorStore` and the list of IDs that failed to be added or updated. """ async def aupsert( self, items: Sequence[Document], /, **kwargs: Any ) -> UpsertResponse: - """Add or update documents in the vectorstore. Async version of upsert. + """Add or update documents in the `VectorStore`. Async version of `upsert`. The upsert functionality should utilize the ID field of the item if it is provided. If the ID is not provided, the upsert method is free to generate an ID for the item. - When an ID is specified and the item already exists in the vectorstore, + When an ID is specified and the item already exists in the `VectorStore`, the upsert method should update the item with the new data. If the item - does not exist, the upsert method should add the item to the vectorstore. + does not exist, the upsert method should add the item to the `VectorStore`. Args: - items: Sequence of documents to add to the vectorstore. + items: Sequence of documents to add to the `VectorStore`. **kwargs: Additional keyword arguments. Returns: A response object that contains the list of IDs that were - successfully added or updated in the vectorstore and the list of IDs that + successfully added or updated in the `VectorStore` and the list of IDs that failed to be added or updated. 
""" return await run_in_executor( @@ -570,7 +568,7 @@ class DocumentIndex(BaseRetriever): Calling delete without any input parameters should raise a ValueError! Args: - ids: List of ids to delete. + ids: List of IDs to delete. **kwargs: Additional keyword arguments. This is up to the implementation. For example, can include an option to delete the entire index, or else issue a non-blocking delete etc. @@ -588,7 +586,7 @@ class DocumentIndex(BaseRetriever): Calling adelete without any input parameters should raise a ValueError! Args: - ids: List of ids to delete. + ids: List of IDs to delete. **kwargs: Additional keyword arguments. This is up to the implementation. For example, can include an option to delete the entire index. diff --git a/libs/core/langchain_core/indexing/in_memory.py b/libs/core/langchain_core/indexing/in_memory.py index 1ef40f9947e..ae9cf84088d 100644 --- a/libs/core/langchain_core/indexing/in_memory.py +++ b/libs/core/langchain_core/indexing/in_memory.py @@ -23,8 +23,6 @@ class InMemoryDocumentIndex(DocumentIndex): It provides a simple search API that returns documents by the number of counts the given query appears in the document. - - !!! version-added "Added in version 0.2.29" """ store: dict[str, Document] = Field(default_factory=dict) @@ -64,10 +62,10 @@ class InMemoryDocumentIndex(DocumentIndex): """Delete by IDs. Args: - ids: List of ids to delete. + ids: List of IDs to delete. Raises: - ValueError: If ids is None. + ValueError: If IDs is None. Returns: A response object that contains the list of IDs that were successfully diff --git a/libs/core/langchain_core/language_models/__init__.py b/libs/core/langchain_core/language_models/__init__.py index 6e2770ecf10..625543c830f 100644 --- a/libs/core/langchain_core/language_models/__init__.py +++ b/libs/core/langchain_core/language_models/__init__.py @@ -1,43 +1,30 @@ """Language models. -**Language Model** is a type of model that can generate text or complete -text prompts. +LangChain has two main classes to work with language models: chat models and +"old-fashioned" LLMs. -LangChain has two main classes to work with language models: **Chat Models** -and "old-fashioned" **LLMs**. - -**Chat Models** +**Chat models** Language models that use a sequence of messages as inputs and return chat messages -as outputs (as opposed to using plain text). These are traditionally newer models ( -older models are generally LLMs, see below). Chat models support the assignment of -distinct roles to conversation messages, helping to distinguish messages from the AI, -users, and instructions such as system messages. +as outputs (as opposed to using plain text). -The key abstraction for chat models is `BaseChatModel`. Implementations -should inherit from this class. Please see LangChain how-to guides with more -information on how to implement a custom chat model. +Chat models support the assignment of distinct roles to conversation messages, helping +to distinguish messages from the AI, users, and instructions such as system messages. -To implement a custom Chat Model, inherit from `BaseChatModel`. See -the following guide for more information on how to implement a custom Chat Model: +The key abstraction for chat models is `BaseChatModel`. Implementations should inherit +from this class. -https://python.langchain.com/docs/how_to/custom_chat_model/ +See existing [chat model integrations](https://docs.langchain.com/oss/python/integrations/chat). **LLMs** Language models that takes a string as input and returns a string. 
-These are traditionally older models (newer models generally are Chat Models, -see below). +These are traditionally older models (newer models generally are chat models). -Although the underlying models are string in, string out, the LangChain wrappers -also allow these models to take messages as input. This gives them the same interface -as Chat Models. When messages are passed in as input, they will be formatted into a -string under the hood before being passed to the underlying model. - -To implement a custom LLM, inherit from `BaseLLM` or `LLM`. -Please see the following guide for more information on how to implement a custom LLM: - -https://python.langchain.com/docs/how_to/custom_llm/ +Although the underlying models are string in, string out, the LangChain wrappers also +allow these models to take messages as input. This gives them the same interface as +chat models. When messages are passed in as input, they will be formatted into a string +under the hood before being passed to the underlying model. """ from typing import TYPE_CHECKING diff --git a/libs/core/langchain_core/language_models/_utils.py b/libs/core/langchain_core/language_models/_utils.py index 48141b54812..d777425dde6 100644 --- a/libs/core/langchain_core/language_models/_utils.py +++ b/libs/core/langchain_core/language_models/_utils.py @@ -35,7 +35,7 @@ def is_openai_data_block( different type, this function will return False. Returns: - True if the block is a valid OpenAI data block and matches the filter_ + `True` if the block is a valid OpenAI data block and matches the filter_ (if provided). """ @@ -89,7 +89,8 @@ class ParsedDataUri(TypedDict): def _parse_data_uri(uri: str) -> ParsedDataUri | None: """Parse a data URI into its components. - If parsing fails, return None. If either MIME type or data is missing, return None. + If parsing fails, return `None`. If either MIME type or data is missing, return + `None`. Example: ```python @@ -138,7 +139,7 @@ def _normalize_messages( directly; this may change in the future - LangChain v0 standard content blocks for backward compatibility - !!! warning "Behavior changed in 1.0.0" + !!! warning "Behavior changed in `langchain-core` 1.0.0" In previous versions, this function returned messages in LangChain v0 format. Now, it returns messages in LangChain v1 format, which upgraded chat models now expect to receive when passing back in message history. For backward diff --git a/libs/core/langchain_core/language_models/base.py b/libs/core/langchain_core/language_models/base.py index 726c7aee6b0..58c6d4abf7e 100644 --- a/libs/core/langchain_core/language_models/base.py +++ b/libs/core/langchain_core/language_models/base.py @@ -96,9 +96,16 @@ def _get_token_ids_default_method(text: str) -> list[int]: LanguageModelInput = PromptValue | str | Sequence[MessageLikeRepresentation] +"""Input to a language model.""" + LanguageModelOutput = BaseMessage | str +"""Output from a language model.""" + LanguageModelLike = Runnable[LanguageModelInput, LanguageModelOutput] +"""Input/output interface for a language model.""" + LanguageModelOutputVar = TypeVar("LanguageModelOutputVar", AIMessage, str) +"""Type variable for the output of a language model.""" def _get_verbosity() -> bool: @@ -123,16 +130,20 @@ class BaseLanguageModel( * If instance of `BaseCache`, will use the provided cache. Caching is not currently supported for streaming methods of models. 
- """ + verbose: bool = Field(default_factory=_get_verbosity, exclude=True, repr=False) """Whether to print out response text.""" + callbacks: Callbacks = Field(default=None, exclude=True) """Callbacks to add to the run trace.""" + tags: list[str] | None = Field(default=None, exclude=True) """Tags to add to the run trace.""" + metadata: dict[str, Any] | None = Field(default=None, exclude=True) """Metadata to add to the run trace.""" + custom_get_token_ids: Callable[[str], list[int]] | None = Field( default=None, exclude=True ) @@ -146,7 +157,7 @@ class BaseLanguageModel( def set_verbose(cls, verbose: bool | None) -> bool: # noqa: FBT001 """If verbose is `None`, set it. - This allows users to pass in None as verbose to access the global setting. + This allows users to pass in `None` as verbose to access the global setting. Args: verbose: The verbosity setting to use. @@ -186,22 +197,29 @@ class BaseLanguageModel( 1. Take advantage of batched calls, 2. Need more output from the model than just the top generated value, 3. Are building chains that are agnostic to the underlying language model - type (e.g., pure text completion models vs chat models). + type (e.g., pure text completion models vs chat models). Args: - prompts: List of PromptValues. A PromptValue is an object that can be - converted to match the format of any language model (string for pure - text generation models and BaseMessages for chat models). - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. - callbacks: Callbacks to pass through. Used for executing additional - functionality, such as logging or streaming, throughout generation. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + prompts: List of `PromptValue` objects. + + A `PromptValue` is an object that can be converted to match the format + of any language model (string for pure text generation models and + `BaseMessage` objects for chat models). + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + callbacks: `Callbacks` to pass through. + + Used for executing additional functionality, such as logging or + streaming, throughout generation. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Returns: - An LLMResult, which contains a list of candidate Generations for each input - prompt and additional model provider-specific output. + An `LLMResult`, which contains a list of candidate `Generation` objects for + each input prompt and additional model provider-specific output. """ @@ -223,22 +241,29 @@ class BaseLanguageModel( 1. Take advantage of batched calls, 2. Need more output from the model than just the top generated value, 3. Are building chains that are agnostic to the underlying language model - type (e.g., pure text completion models vs chat models). + type (e.g., pure text completion models vs chat models). Args: - prompts: List of PromptValues. A PromptValue is an object that can be - converted to match the format of any language model (string for pure - text generation models and BaseMessages for chat models). - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. - callbacks: Callbacks to pass through. Used for executing additional - functionality, such as logging or streaming, throughout generation. 
- **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + prompts: List of `PromptValue` objects. + + A `PromptValue` is an object that can be converted to match the format + of any language model (string for pure text generation models and + `BaseMessage` objects for chat models). + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + callbacks: `Callbacks` to pass through. + + Used for executing additional functionality, such as logging or + streaming, throughout generation. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Returns: - An `LLMResult`, which contains a list of candidate Generations for each - input prompt and additional model provider-specific output. + An `LLMResult`, which contains a list of candidate `Generation` objects for + each input prompt and additional model provider-specific output. """ @@ -256,15 +281,14 @@ class BaseLanguageModel( return self.lc_attributes def get_token_ids(self, text: str) -> list[int]: - """Return the ordered ids of the tokens in a text. + """Return the ordered IDs of the tokens in a text. Args: text: The string input to tokenize. Returns: - A list of ids corresponding to the tokens in the text, in order they occur - in the text. - + A list of IDs corresponding to the tokens in the text, in order they occur + in the text. """ if self.custom_get_token_ids is not None: return self.custom_get_token_ids(text) diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py index 6506111092e..f37113b4661 100644 --- a/libs/core/langchain_core/language_models/chat_models.py +++ b/libs/core/langchain_core/language_models/chat_models.py @@ -15,6 +15,7 @@ from typing import TYPE_CHECKING, Any, Literal, cast from pydantic import BaseModel, ConfigDict, Field from typing_extensions import override +from langchain_core._api.beta_decorator import beta from langchain_core.caches import BaseCache from langchain_core.callbacks import ( AsyncCallbackManager, @@ -75,6 +76,8 @@ from langchain_core.utils.utils import LC_ID_PREFIX, from_env if TYPE_CHECKING: import uuid + from langchain_model_profiles import ModelProfile # type: ignore[import-untyped] + from langchain_core.output_parsers.base import OutputParserLike from langchain_core.runnables import Runnable, RunnableConfig from langchain_core.tools import BaseTool @@ -240,79 +243,52 @@ def _format_ls_structured_output(ls_structured_output_format: dict | None) -> di class BaseChatModel(BaseLanguageModel[AIMessage], ABC): - """Base class for chat models. + r"""Base class for chat models. Key imperative methods: Methods that actually call the underlying model. - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | Method | Input | Output | Description | - +===========================+================================================================+=====================================================================+==================================================================================================+ - | `invoke` | str | list[dict | tuple | BaseMessage] | PromptValue | BaseMessage | A single chat model call. 
| - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `ainvoke` | ''' | BaseMessage | Defaults to running invoke in an async executor. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `stream` | ''' | Iterator[BaseMessageChunk] | Defaults to yielding output of invoke. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `astream` | ''' | AsyncIterator[BaseMessageChunk] | Defaults to yielding output of ainvoke. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `astream_events` | ''' | AsyncIterator[StreamEvent] | Event types: 'on_chat_model_start', 'on_chat_model_stream', 'on_chat_model_end'. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `batch` | list['''] | list[BaseMessage] | Defaults to running invoke in concurrent threads. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `abatch` | list['''] | list[BaseMessage] | Defaults to running ainvoke in concurrent threads. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `batch_as_completed` | list['''] | Iterator[tuple[int, Union[BaseMessage, Exception]]] | Defaults to running invoke in concurrent threads. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ - | `abatch_as_completed` | list['''] | AsyncIterator[tuple[int, Union[BaseMessage, Exception]]] | Defaults to running ainvoke in concurrent threads. | - +---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+ + This table provides a brief overview of the main imperative methods. Please see the base `Runnable` reference for full documentation. - This table provides a brief overview of the main imperative methods. 
Please see the base Runnable reference for full documentation. + | Method | Input | Output | Description | + | ---------------------- | ------------------------------------------------------------ | ---------------------------------------------------------- | -------------------------------------------------------------------------------- | + | `invoke` | `str` \| `list[dict | tuple | BaseMessage]` \| `PromptValue` | `BaseMessage` | A single chat model call. | + | `ainvoke` | `'''` | `BaseMessage` | Defaults to running `invoke` in an async executor. | + | `stream` | `'''` | `Iterator[BaseMessageChunk]` | Defaults to yielding output of `invoke`. | + | `astream` | `'''` | `AsyncIterator[BaseMessageChunk]` | Defaults to yielding output of `ainvoke`. | + | `astream_events` | `'''` | `AsyncIterator[StreamEvent]` | Event types: `on_chat_model_start`, `on_chat_model_stream`, `on_chat_model_end`. | + | `batch` | `list[''']` | `list[BaseMessage]` | Defaults to running `invoke` in concurrent threads. | + | `abatch` | `list[''']` | `list[BaseMessage]` | Defaults to running `ainvoke` in concurrent threads. | + | `batch_as_completed` | `list[''']` | `Iterator[tuple[int, Union[BaseMessage, Exception]]]` | Defaults to running `invoke` in concurrent threads. | + | `abatch_as_completed` | `list[''']` | `AsyncIterator[tuple[int, Union[BaseMessage, Exception]]]` | Defaults to running `ainvoke` in concurrent threads. | Key declarative methods: - Methods for creating another Runnable using the ChatModel. - - +----------------------------------+-----------------------------------------------------------------------------------------------------------+ - | Method | Description | - +==================================+===========================================================================================================+ - | `bind_tools` | Create ChatModel that can call tools. | - +----------------------------------+-----------------------------------------------------------------------------------------------------------+ - | `with_structured_output` | Create wrapper that structures model output using schema. | - +----------------------------------+-----------------------------------------------------------------------------------------------------------+ - | `with_retry` | Create wrapper that retries model calls on failure. | - +----------------------------------+-----------------------------------------------------------------------------------------------------------+ - | `with_fallbacks` | Create wrapper that falls back to other models on failure. | - +----------------------------------+-----------------------------------------------------------------------------------------------------------+ - | `configurable_fields` | Specify init args of the model that can be configured at runtime via the RunnableConfig. | - +----------------------------------+-----------------------------------------------------------------------------------------------------------+ - | `configurable_alternatives` | Specify alternative models which can be swapped in at runtime via the RunnableConfig. | - +----------------------------------+-----------------------------------------------------------------------------------------------------------+ + Methods for creating another `Runnable` using the chat model. This table provides a brief overview of the main declarative methods. Please see the reference for each method for full documentation. 
+ | Method | Description | + | ---------------------------- | ------------------------------------------------------------------------------------------ | + | `bind_tools` | Create chat model that can call tools. | + | `with_structured_output` | Create wrapper that structures model output using schema. | + | `with_retry` | Create wrapper that retries model calls on failure. | + | `with_fallbacks` | Create wrapper that falls back to other models on failure. | + | `configurable_fields` | Specify init args of the model that can be configured at runtime via the `RunnableConfig`. | + | `configurable_alternatives` | Specify alternative models which can be swapped in at runtime via the `RunnableConfig`. | + Creating custom chat model: Custom chat model implementations should inherit from this class. Please reference the table below for information about which methods and properties are required or optional for implementations. - +----------------------------------+--------------------------------------------------------------------+-------------------+ - | Method/Property | Description | Required/Optional | - +==================================+====================================================================+===================+ + | Method/Property | Description | Required | + | -------------------------------- | ------------------------------------------------------------------ | ----------------- | | `_generate` | Use to generate a chat result from a prompt | Required | - +----------------------------------+--------------------------------------------------------------------+-------------------+ | `_llm_type` (property) | Used to uniquely identify the type of the model. Used for logging. | Required | - +----------------------------------+--------------------------------------------------------------------+-------------------+ | `_identifying_params` (property) | Represent model parameterization for tracing purposes. | Optional | - +----------------------------------+--------------------------------------------------------------------+-------------------+ | `_stream` | Use to implement streaming | Optional | - +----------------------------------+--------------------------------------------------------------------+-------------------+ | `_agenerate` | Use to implement a native async method | Optional | - +----------------------------------+--------------------------------------------------------------------+-------------------+ | `_astream` | Use to implement async version of `_stream` | Optional | - +----------------------------------+--------------------------------------------------------------------+-------------------+ - - Follow the guide for more information on how to implement a custom Chat Model: - [Guide](https://python.langchain.com/docs/how_to/custom_chat_model/). """ # noqa: E501 @@ -327,9 +303,9 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC): - If `True`, will always bypass streaming case. - If `'tool_calling'`, will bypass streaming case only when the model is called - with a `tools` keyword argument. In other words, LangChain will automatically - switch to non-streaming behavior (`invoke`) only when the tools argument is - provided. This offers the best of both worlds. + with a `tools` keyword argument. In other words, LangChain will automatically + switch to non-streaming behavior (`invoke`) only when the tools argument is + provided. This offers the best of both worlds. - If `False` (Default), will always use streaming case if available. 
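For example, an integration's model might opt into the `'tool_calling'` behavior described in the list above as in the sketch below. The package and model name are assumptions for illustration; `disable_streaming` itself is a field inherited from `BaseChatModel`, so any subclass accepts it.

```python
from langchain_openai import ChatOpenAI  # assumed installed; any BaseChatModel subclass works

# Stream ordinary chat calls, but transparently fall back to `invoke`
# whenever tools are bound to the model.
model = ChatOpenAI(model="gpt-4o-mini", disable_streaming="tool_calling")
```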
The main reason for this flag is that code might be written using `stream` and @@ -349,13 +325,14 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC): Supported values: - `'v0'`: provider-specific format in content (can lazily-parse with - `.content_blocks`) - - `'v1'`: standardized format in content (consistent with `.content_blocks`) + `content_blocks`) + - `'v1'`: standardized format in content (consistent with `content_blocks`) - Partner packages (e.g., `langchain-openai`) can also use this field to roll out - new content formats in a backward-compatible way. + Partner packages (e.g., + [`langchain-openai`](https://pypi.org/project/langchain-openai)) can also use this + field to roll out new content formats in a backward-compatible way. - !!! version-added "Added in version 1.0" + !!! version-added "Added in `langchain-core` 1.0" """ @@ -864,24 +841,29 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC): 1. Take advantage of batched calls, 2. Need more output from the model than just the top generated value, 3. Are building chains that are agnostic to the underlying language model - type (e.g., pure text completion models vs chat models). + type (e.g., pure text completion models vs chat models). Args: messages: List of list of messages. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. - callbacks: Callbacks to pass through. Used for executing additional - functionality, such as logging or streaming, throughout generation. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + callbacks: `Callbacks` to pass through. + + Used for executing additional functionality, such as logging or + streaming, throughout generation. tags: The tags to apply. metadata: The metadata to apply. run_name: The name of the run. run_id: The ID of the run. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Returns: - An LLMResult, which contains a list of candidate Generations for each input - prompt and additional model provider-specific output. + An `LLMResult`, which contains a list of candidate `Generations` for each + input prompt and additional model provider-specific output. """ ls_structured_output_format = kwargs.pop( @@ -982,24 +964,29 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC): 1. Take advantage of batched calls, 2. Need more output from the model than just the top generated value, 3. Are building chains that are agnostic to the underlying language model - type (e.g., pure text completion models vs chat models). + type (e.g., pure text completion models vs chat models). Args: messages: List of list of messages. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. - callbacks: Callbacks to pass through. Used for executing additional - functionality, such as logging or streaming, throughout generation. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + callbacks: `Callbacks` to pass through. + + Used for executing additional functionality, such as logging or + streaming, throughout generation. tags: The tags to apply. metadata: The metadata to apply. run_name: The name of the run. run_id: The ID of the run. 
- **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Returns: - An LLMResult, which contains a list of candidate Generations for each input - prompt and additional model provider-specific output. + An `LLMResult`, which contains a list of candidate `Generations` for each + input prompt and additional model provider-specific output. """ ls_structured_output_format = kwargs.pop( @@ -1528,25 +1515,33 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC): Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class, - - or a Pydantic class. + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. Raises: ValueError: If there are any unsupported `kwargs`. @@ -1554,99 +1549,102 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC): `with_structured_output()`. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - Otherwise, if `include_raw` is False then Runnable outputs a dict. + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. 
+ - `'parsing_error'`: `BaseException | None` - If `include_raw` is True, then Runnable outputs a dict with keys: + Example: Pydantic schema (`include_raw=False`): - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None - - Example: Pydantic schema (include_raw=False): - ```python - from pydantic import BaseModel + ```python + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - llm = ChatModel(model="model-name", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatModel(model="model-name", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) - # -> AnswerWithJustification( - # answer='They weigh the same', - # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' - # ) - ``` + # -> AnswerWithJustification( + # answer='They weigh the same', + # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' + # ) + ``` - Example: Pydantic schema (include_raw=True): - ```python - from pydantic import BaseModel + Example: Pydantic schema (`include_raw=True`): + + ```python + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - llm = ChatModel(model="model-name", temperature=0) - structured_llm = llm.with_structured_output( - AnswerWithJustification, include_raw=True - ) + model = ChatModel(model="model-name", temperature=0) + structured_model = model.with_structured_output( + AnswerWithJustification, include_raw=True + ) - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), - # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. 
The weight is the same, but the volume or density of the objects may differ.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + # -> { + # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), + # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), + # 'parsing_error': None + # } + ``` - Example: Dict schema (include_raw=False): - ```python - from pydantic import BaseModel - from langchain_core.utils.function_calling import convert_to_openai_tool + Example: `dict` schema (`include_raw=False`): + + ```python + from pydantic import BaseModel + from langchain_core.utils.function_calling import convert_to_openai_tool - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - dict_schema = convert_to_openai_tool(AnswerWithJustification) - llm = ChatModel(model="model-name", temperature=0) - structured_llm = llm.with_structured_output(dict_schema) + dict_schema = convert_to_openai_tool(AnswerWithJustification) + model = ChatModel(model="model-name", temperature=0) + structured_model = model.with_structured_output(dict_schema) - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'answer': 'They weigh the same', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' - # } - ``` + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + # -> { + # 'answer': 'They weigh the same', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' + # } + ``` - !!! warning "Behavior changed in 0.2.26" - Added support for TypedDict class. + !!! warning "Behavior changed in `langchain-core` 0.2.26" + Added support for `TypedDict` class. """ # noqa: E501 _ = kwargs.pop("method", None) @@ -1687,6 +1685,40 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC): return RunnableMap(raw=llm) | parser_with_fallback return llm | output_parser + @property + @beta() + def profile(self) -> ModelProfile: + """Return profiling information for the model. + + This property relies on the `langchain-model-profiles` package to retrieve chat + model capabilities, such as context window sizes and supported features. + + Raises: + ImportError: If `langchain-model-profiles` is not installed. + + Returns: + A `ModelProfile` object containing profiling information for the model. 
+ """ + try: + from langchain_model_profiles import get_model_profile # noqa: PLC0415 + except ImportError as err: + informative_error_message = ( + "To access model profiling information, please install the " + "`langchain-model-profiles` package: " + "`pip install langchain-model-profiles`." + ) + raise ImportError(informative_error_message) from err + + provider_id = self._llm_type + model_name = ( + # Model name is not standardized across integrations. New integrations + # should prefer `model`. + getattr(self, "model", None) + or getattr(self, "model_name", None) + or getattr(self, "model_id", "") + ) + return get_model_profile(provider_id, model_name) or {} + class SimpleChatModel(BaseChatModel): """Simplified implementation for a chat model to inherit from. @@ -1745,9 +1777,12 @@ def _gen_info_and_msg_metadata( } +_MAX_CLEANUP_DEPTH = 100 + + def _cleanup_llm_representation(serialized: Any, depth: int) -> None: """Remove non-serializable objects from a serialized object.""" - if depth > 100: # Don't cooperate for pathological cases + if depth > _MAX_CLEANUP_DEPTH: # Don't cooperate for pathological cases return if not isinstance(serialized, dict): diff --git a/libs/core/langchain_core/language_models/fake_chat_models.py b/libs/core/langchain_core/language_models/fake_chat_models.py index 340f8ad3026..7ffb5896013 100644 --- a/libs/core/langchain_core/language_models/fake_chat_models.py +++ b/libs/core/langchain_core/language_models/fake_chat_models.py @@ -1,4 +1,4 @@ -"""Fake ChatModel for testing purposes.""" +"""Fake chat models for testing purposes.""" import asyncio import re @@ -19,7 +19,7 @@ from langchain_core.runnables import RunnableConfig class FakeMessagesListChatModel(BaseChatModel): - """Fake `ChatModel` for testing purposes.""" + """Fake chat model for testing purposes.""" responses: list[BaseMessage] """List of responses to **cycle** through in order.""" @@ -57,7 +57,7 @@ class FakeListChatModelError(Exception): class FakeListChatModel(SimpleChatModel): - """Fake ChatModel for testing purposes.""" + """Fake chat model for testing purposes.""" responses: list[str] """List of responses to **cycle** through in order.""" diff --git a/libs/core/langchain_core/language_models/llms.py b/libs/core/langchain_core/language_models/llms.py index e81d40832a9..813ae7b21b9 100644 --- a/libs/core/langchain_core/language_models/llms.py +++ b/libs/core/langchain_core/language_models/llms.py @@ -1,4 +1,7 @@ -"""Base interface for large language models to expose.""" +"""Base interface for traditional large language models (LLMs) to expose. + +These are traditionally older models (newer models generally are chat models). +""" from __future__ import annotations @@ -74,8 +77,8 @@ def create_base_retry_decorator( Args: error_types: List of error types to retry on. - max_retries: Number of retries. Default is 1. - run_manager: Callback manager for the run. Default is None. + max_retries: Number of retries. + run_manager: Callback manager for the run. Returns: A retry decorator. 
@@ -91,13 +94,17 @@ def create_base_retry_decorator( if isinstance(run_manager, AsyncCallbackManagerForLLMRun): coro = run_manager.on_retry(retry_state) try: - loop = asyncio.get_event_loop() - if loop.is_running(): - # TODO: Fix RUF006 - this task should have a reference - # and be awaited somewhere - loop.create_task(coro) # noqa: RUF006 - else: + try: + loop = asyncio.get_event_loop() + except RuntimeError: asyncio.run(coro) + else: + if loop.is_running(): + # TODO: Fix RUF006 - this task should have a reference + # and be awaited somewhere + loop.create_task(coro) # noqa: RUF006 + else: + asyncio.run(coro) except Exception as e: _log_error_once(f"Error in on_retry: {e}") else: @@ -153,7 +160,7 @@ def get_prompts( Args: params: Dictionary of parameters. prompts: List of prompts. - cache: Cache object. Default is None. + cache: Cache object. Returns: A tuple of existing prompts, llm_string, missing prompt indexes, @@ -189,7 +196,7 @@ async def aget_prompts( Args: params: Dictionary of parameters. prompts: List of prompts. - cache: Cache object. Default is None. + cache: Cache object. Returns: A tuple of existing prompts, llm_string, missing prompt indexes, @@ -644,9 +651,12 @@ class BaseLLM(BaseLanguageModel[str], ABC): Args: prompts: The prompts to generate from. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of the stop substrings. - If stop tokens are not supported consider raising NotImplementedError. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + + If stop tokens are not supported consider raising `NotImplementedError`. run_manager: Callback manager for the run. Returns: @@ -664,9 +674,12 @@ class BaseLLM(BaseLanguageModel[str], ABC): Args: prompts: The prompts to generate from. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of the stop substrings. - If stop tokens are not supported consider raising NotImplementedError. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + + If stop tokens are not supported consider raising `NotImplementedError`. run_manager: Callback manager for the run. Returns: @@ -698,11 +711,14 @@ class BaseLLM(BaseLanguageModel[str], ABC): Args: prompt: The prompt to generate from. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. run_manager: Callback manager for the run. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Yields: Generation chunks. @@ -724,11 +740,14 @@ class BaseLLM(BaseLanguageModel[str], ABC): Args: prompt: The prompt to generate from. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. run_manager: Callback manager for the run. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. 
+ + These are usually passed to the model provider API call. Yields: Generation chunks. @@ -839,10 +858,14 @@ class BaseLLM(BaseLanguageModel[str], ABC): Args: prompts: List of string prompts. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. - callbacks: Callbacks to pass through. Used for executing additional - functionality, such as logging or streaming, throughout generation. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + callbacks: `Callbacks` to pass through. + + Used for executing additional functionality, such as logging or + streaming, throughout generation. tags: List of tags to associate with each prompt. If provided, the length of the list must match the length of the prompts list. metadata: List of metadata dictionaries to associate with each prompt. If @@ -852,8 +875,9 @@ class BaseLLM(BaseLanguageModel[str], ABC): length of the list must match the length of the prompts list. run_id: List of run IDs to associate with each prompt. If provided, the length of the list must match the length of the prompts list. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Raises: ValueError: If prompts is not a list. @@ -861,8 +885,8 @@ class BaseLLM(BaseLanguageModel[str], ABC): `run_name` (if provided) does not match the length of prompts. Returns: - An LLMResult, which contains a list of candidate Generations for each input - prompt and additional model provider-specific output. + An `LLMResult`, which contains a list of candidate `Generations` for each + input prompt and additional model provider-specific output. """ if not isinstance(prompts, list): msg = ( @@ -1109,10 +1133,14 @@ class BaseLLM(BaseLanguageModel[str], ABC): Args: prompts: List of string prompts. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of these substrings. - callbacks: Callbacks to pass through. Used for executing additional - functionality, such as logging or streaming, throughout generation. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + callbacks: `Callbacks` to pass through. + + Used for executing additional functionality, such as logging or + streaming, throughout generation. tags: List of tags to associate with each prompt. If provided, the length of the list must match the length of the prompts list. metadata: List of metadata dictionaries to associate with each prompt. If @@ -1122,16 +1150,17 @@ class BaseLLM(BaseLanguageModel[str], ABC): length of the list must match the length of the prompts list. run_id: List of run IDs to associate with each prompt. If provided, the length of the list must match the length of the prompts list. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Raises: ValueError: If the length of `callbacks`, `tags`, `metadata`, or `run_name` (if provided) does not match the length of prompts. Returns: - An LLMResult, which contains a list of candidate Generations for each input - prompt and additional model provider-specific output. 
+ An `LLMResult`, which contains a list of candidate `Generations` for each + input prompt and additional model provider-specific output. """ if isinstance(metadata, list): metadata = [ @@ -1387,11 +1416,6 @@ class LLM(BaseLLM): `astream` will use `_astream` if provided, otherwise it will implement a fallback behavior that will use `_stream` if `_stream` is implemented, and use `_acall` if `_stream` is not implemented. - - Please see the following guide for more information on how to - implement a custom LLM: - - https://python.langchain.com/docs/how_to/custom_llm/ """ @abstractmethod @@ -1408,12 +1432,16 @@ class LLM(BaseLLM): Args: prompt: The prompt to generate from. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of the stop substrings. - If stop tokens are not supported consider raising NotImplementedError. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + + If stop tokens are not supported consider raising `NotImplementedError`. run_manager: Callback manager for the run. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Returns: The model output as a string. SHOULD NOT include the prompt. @@ -1434,12 +1462,16 @@ class LLM(BaseLLM): Args: prompt: The prompt to generate from. - stop: Stop words to use when generating. Model output is cut off at the - first occurrence of any of the stop substrings. - If stop tokens are not supported consider raising NotImplementedError. + stop: Stop words to use when generating. + + Model output is cut off at the first occurrence of any of these + substrings. + + If stop tokens are not supported consider raising `NotImplementedError`. run_manager: Callback manager for the run. - **kwargs: Arbitrary additional keyword arguments. These are usually passed - to the model provider API call. + **kwargs: Arbitrary additional keyword arguments. + + These are usually passed to the model provider API call. Returns: The model output as a string. SHOULD NOT include the prompt. diff --git a/libs/core/langchain_core/load/dump.py b/libs/core/langchain_core/load/dump.py index 01c886d33bd..4cb9ca59892 100644 --- a/libs/core/langchain_core/load/dump.py +++ b/libs/core/langchain_core/load/dump.py @@ -17,7 +17,7 @@ def default(obj: Any) -> Any: obj: The object to serialize to json if it is a Serializable object. Returns: - A json serializable object or a SerializedNotImplemented object. + A JSON serializable object or a SerializedNotImplemented object. """ if isinstance(obj, Serializable): return obj.to_json() @@ -38,17 +38,16 @@ def _dump_pydantic_models(obj: Any) -> Any: def dumps(obj: Any, *, pretty: bool = False, **kwargs: Any) -> str: - """Return a json string representation of an object. + """Return a JSON string representation of an object. Args: obj: The object to dump. - pretty: Whether to pretty print the json. If true, the json will be - indented with 2 spaces (if no indent is provided as part of kwargs). - Default is False. - **kwargs: Additional arguments to pass to json.dumps + pretty: Whether to pretty print the json. If `True`, the json will be + indented with 2 spaces (if no indent is provided as part of `kwargs`). + **kwargs: Additional arguments to pass to `json.dumps` Returns: - A json string representation of the object. 
+ A JSON string representation of the object. Raises: ValueError: If `default` is passed as a kwarg. @@ -72,14 +71,12 @@ def dumps(obj: Any, *, pretty: bool = False, **kwargs: Any) -> str: def dumpd(obj: Any) -> Any: """Return a dict representation of an object. - !!! note - Unfortunately this function is not as efficient as it could be because it first - dumps the object to a json string and then loads it back into a dictionary. - Args: obj: The object to dump. Returns: - dictionary that can be serialized to json using json.dumps + Dictionary that can be serialized to json using `json.dumps`. """ + # Unfortunately this function is not as efficient as it could be because it first + # dumps the object to a json string and then loads it back into a dictionary. return json.loads(dumps(obj)) diff --git a/libs/core/langchain_core/load/load.py b/libs/core/langchain_core/load/load.py index 7b71760cf21..ed832e69dbb 100644 --- a/libs/core/langchain_core/load/load.py +++ b/libs/core/langchain_core/load/load.py @@ -63,16 +63,13 @@ class Reviver: Args: secrets_map: A map of secrets to load. If a secret is not found in the map, it will be loaded from the environment if `secrets_from_env` - is True. Defaults to `None`. + is True. valid_namespaces: A list of additional namespaces (modules) - to allow to be deserialized. Defaults to `None`. + to allow to be deserialized. secrets_from_env: Whether to load secrets from the environment. - Defaults to `True`. additional_import_mappings: A dictionary of additional namespace mappings You can use this to override default mappings or add new mappings. - Defaults to `None`. ignore_unserializable_fields: Whether to ignore unserializable fields. - Defaults to `False`. """ self.secrets_from_env = secrets_from_env self.secrets_map = secrets_map or {} @@ -200,16 +197,13 @@ def loads( text: The string to load. secrets_map: A map of secrets to load. If a secret is not found in the map, it will be loaded from the environment if `secrets_from_env` - is True. Defaults to `None`. + is True. valid_namespaces: A list of additional namespaces (modules) - to allow to be deserialized. Defaults to `None`. + to allow to be deserialized. secrets_from_env: Whether to load secrets from the environment. - Defaults to `True`. additional_import_mappings: A dictionary of additional namespace mappings You can use this to override default mappings or add new mappings. - Defaults to `None`. ignore_unserializable_fields: Whether to ignore unserializable fields. - Defaults to `False`. Returns: Revived LangChain objects. @@ -245,16 +239,13 @@ def load( obj: The object to load. secrets_map: A map of secrets to load. If a secret is not found in the map, it will be loaded from the environment if `secrets_from_env` - is True. Defaults to `None`. + is True. valid_namespaces: A list of additional namespaces (modules) - to allow to be deserialized. Defaults to `None`. + to allow to be deserialized. secrets_from_env: Whether to load secrets from the environment. - Defaults to `True`. additional_import_mappings: A dictionary of additional namespace mappings You can use this to override default mappings or add new mappings. - Defaults to `None`. ignore_unserializable_fields: Whether to ignore unserializable fields. - Defaults to `False`. Returns: Revived LangChain objects. 
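For context on the `dumps`/`dumpd` and `load`/`loads` helpers whose docstrings are touched above, a minimal round-trip sketch follows. The `ChatPromptTemplate` instance is illustrative only; any object whose class opts into LangChain serialization behaves the same way.

```python
# Hedged round-trip sketch for the dump/load helpers documented above.
from langchain_core.load import dumpd, dumps, load, loads
from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages(
    [("system", "You are a helpful assistant."), ("human", "{question}")]
)

as_json = dumps(prompt, pretty=True)  # JSON string, indented with 2 spaces
as_dict = dumpd(prompt)  # dict produced via a dumps()/json.loads() pass

# Revive the objects. The secrets_map / secrets_from_env arguments documented
# above control how secret values (e.g. API keys) are re-populated.
revived_from_json = loads(as_json)
revived_from_dict = load(as_dict)
```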
diff --git a/libs/core/langchain_core/load/serializable.py b/libs/core/langchain_core/load/serializable.py index f4f8c0417b7..9c7588c185b 100644 --- a/libs/core/langchain_core/load/serializable.py +++ b/libs/core/langchain_core/load/serializable.py @@ -25,9 +25,9 @@ class BaseSerialized(TypedDict): id: list[str] """The unique identifier of the object.""" name: NotRequired[str] - """The name of the object. Optional.""" + """The name of the object.""" graph: NotRequired[dict[str, Any]] - """The graph of the object. Optional.""" + """The graph of the object.""" class SerializedConstructor(BaseSerialized): @@ -52,7 +52,7 @@ class SerializedNotImplemented(BaseSerialized): type: Literal["not_implemented"] """The type of the object. Must be `'not_implemented'`.""" repr: str | None - """The representation of the object. Optional.""" + """The representation of the object.""" def try_neq_default(value: Any, key: str, model: BaseModel) -> bool: @@ -61,7 +61,7 @@ def try_neq_default(value: Any, key: str, model: BaseModel) -> bool: Args: value: The value. key: The key. - model: The pydantic model. + model: The Pydantic model. Returns: Whether the value is different from the default. @@ -93,18 +93,21 @@ class Serializable(BaseModel, ABC): It relies on the following methods and properties: - `is_lc_serializable`: Is this class serializable? - By design, even if a class inherits from Serializable, it is not serializable by - default. This is to prevent accidental serialization of objects that should not - be serialized. - - `get_lc_namespace`: Get the namespace of the langchain object. + By design, even if a class inherits from `Serializable`, it is not serializable + by default. This is to prevent accidental serialization of objects that should + not be serialized. + - `get_lc_namespace`: Get the namespace of the LangChain object. + During deserialization, this namespace is used to identify the correct class to instantiate. + Please see the `Reviver` class in `langchain_core.load.load` for more details. - During deserialization an additional mapping is handle - classes that have moved or been renamed across package versions. + During deserialization an additional mapping is handle classes that have moved + or been renamed across package versions. + - `lc_secrets`: A map of constructor argument names to secret ids. - `lc_attributes`: List of additional attribute names that should be included - as part of the serialized representation. + as part of the serialized representation. """ # Remove default BaseModel init docstring. @@ -116,24 +119,24 @@ class Serializable(BaseModel, ABC): def is_lc_serializable(cls) -> bool: """Is this class serializable? - By design, even if a class inherits from Serializable, it is not serializable by - default. This is to prevent accidental serialization of objects that should not - be serialized. + By design, even if a class inherits from `Serializable`, it is not serializable + by default. This is to prevent accidental serialization of objects that should + not be serialized. Returns: - Whether the class is serializable. Default is False. + Whether the class is serializable. Default is `False`. """ return False @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. For example, if the class is `langchain.llms.openai.OpenAI`, then the - namespace is ["langchain", "llms", "openai"] + namespace is `["langchain", "llms", "openai"]` Returns: - The namespace as a list of strings. 
+ The namespace. """ return cls.__module__.split(".") @@ -141,8 +144,7 @@ class Serializable(BaseModel, ABC): def lc_secrets(self) -> dict[str, str]: """A map of constructor argument names to secret ids. - For example, - {"openai_api_key": "OPENAI_API_KEY"} + For example, `{"openai_api_key": "OPENAI_API_KEY"}` """ return {} @@ -151,6 +153,7 @@ class Serializable(BaseModel, ABC): """List of attribute names that should be included in the serialized kwargs. These attributes must be accepted by the constructor. + Default is an empty dictionary. """ return {} @@ -194,7 +197,7 @@ class Serializable(BaseModel, ABC): ValueError: If the class has deprecated attributes. Returns: - A json serializable object or a SerializedNotImplemented object. + A JSON serializable object or a `SerializedNotImplemented` object. """ if not self.is_lc_serializable(): return self.to_json_not_implemented() @@ -269,7 +272,7 @@ class Serializable(BaseModel, ABC): """Serialize a "not implemented" object. Returns: - SerializedNotImplemented. + `SerializedNotImplemented`. """ return to_json_not_implemented(self) @@ -284,8 +287,8 @@ def _is_field_useful(inst: Serializable, key: str, value: Any) -> bool: Returns: Whether the field is useful. If the field is required, it is useful. - If the field is not required, it is useful if the value is not None. - If the field is not required and the value is None, it is useful if the + If the field is not required, it is useful if the value is not `None`. + If the field is not required and the value is `None`, it is useful if the default value is different from the value. """ field = type(inst).model_fields.get(key) @@ -344,10 +347,10 @@ def to_json_not_implemented(obj: object) -> SerializedNotImplemented: """Serialize a "not implemented" object. Args: - obj: object to serialize. + obj: Object to serialize. 
Returns: - SerializedNotImplemented + `SerializedNotImplemented` """ id_: list[str] = [] try: diff --git a/libs/core/langchain_core/messages/__init__.py b/libs/core/langchain_core/messages/__init__.py index 183aff7fae7..97171f56b16 100644 --- a/libs/core/langchain_core/messages/__init__.py +++ b/libs/core/langchain_core/messages/__init__.py @@ -9,6 +9,9 @@ if TYPE_CHECKING: from langchain_core.messages.ai import ( AIMessage, AIMessageChunk, + InputTokenDetails, + OutputTokenDetails, + UsageMetadata, ) from langchain_core.messages.base import ( BaseMessage, @@ -87,10 +90,12 @@ __all__ = ( "HumanMessage", "HumanMessageChunk", "ImageContentBlock", + "InputTokenDetails", "InvalidToolCall", "MessageLikeRepresentation", "NonStandardAnnotation", "NonStandardContentBlock", + "OutputTokenDetails", "PlainTextContentBlock", "ReasoningContentBlock", "RemoveMessage", @@ -104,6 +109,7 @@ __all__ = ( "ToolCallChunk", "ToolMessage", "ToolMessageChunk", + "UsageMetadata", "VideoContentBlock", "_message_from_dict", "convert_to_messages", @@ -145,6 +151,7 @@ _dynamic_imports = { "HumanMessageChunk": "human", "NonStandardAnnotation": "content", "NonStandardContentBlock": "content", + "OutputTokenDetails": "ai", "PlainTextContentBlock": "content", "ReasoningContentBlock": "content", "RemoveMessage": "modifier", @@ -154,12 +161,14 @@ _dynamic_imports = { "SystemMessage": "system", "SystemMessageChunk": "system", "ImageContentBlock": "content", + "InputTokenDetails": "ai", "InvalidToolCall": "tool", "TextContentBlock": "content", "ToolCall": "tool", "ToolCallChunk": "tool", "ToolMessage": "tool", "ToolMessageChunk": "tool", + "UsageMetadata": "ai", "VideoContentBlock": "content", "AnyMessage": "utils", "MessageLikeRepresentation": "utils", diff --git a/libs/core/langchain_core/messages/ai.py b/libs/core/langchain_core/messages/ai.py index 011c1d2ed46..fb85027b142 100644 --- a/libs/core/langchain_core/messages/ai.py +++ b/libs/core/langchain_core/messages/ai.py @@ -48,10 +48,10 @@ class InputTokenDetails(TypedDict, total=False): } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + !!! version-added "Added in `langchain-core` 0.3.9" + """ audio: int @@ -83,7 +83,9 @@ class OutputTokenDetails(TypedDict, total=False): } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" """ @@ -121,9 +123,13 @@ class UsageMetadata(TypedDict): } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. """ input_tokens: int @@ -131,7 +137,7 @@ class UsageMetadata(TypedDict): output_tokens: int """Count of output (or completion) tokens. Sum of all output token types.""" total_tokens: int - """Total token count. Sum of input_tokens + output_tokens.""" + """Total token count. Sum of `input_tokens` + `output_tokens`.""" input_token_details: NotRequired[InputTokenDetails] """Breakdown of input token counts. @@ -141,34 +147,31 @@ class UsageMetadata(TypedDict): """Breakdown of output token counts. Does *not* need to sum to full output token count. Does *not* need to have all keys. - """ class AIMessage(BaseMessage): """Message from an AI. 
- AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. - """ tool_calls: list[ToolCall] = [] - """If provided, tool calls associated with the message.""" + """If present, tool calls associated with the message.""" invalid_tool_calls: list[InvalidToolCall] = [] - """If provided, tool calls with parsing errors associated with the message.""" + """If present, tool calls with parsing errors associated with the message.""" usage_metadata: UsageMetadata | None = None - """If provided, usage metadata for a message, such as token counts. + """If present, usage metadata for a message, such as token counts. This is a standard representation of token usage that is consistent across models. - """ type: Literal["ai"] = "ai" - """The type of the message (used for deserialization). Defaults to "ai".""" + """The type of the message (used for deserialization).""" @overload def __init__( @@ -191,7 +194,7 @@ class AIMessage(BaseMessage): content_blocks: list[types.ContentBlock] | None = None, **kwargs: Any, ) -> None: - """Initialize `AIMessage`. + """Initialize an `AIMessage`. Specify `content` as positional arg or `content_blocks` for typing. @@ -217,7 +220,11 @@ class AIMessage(BaseMessage): @property def lc_attributes(self) -> dict: - """Attrs to be serialized even if they are derived from other init args.""" + """Attributes to be serialized. + + Includes all attributes, even if they are derived from other initialization + arguments. + """ return { "tool_calls": self.tool_calls, "invalid_tool_calls": self.invalid_tool_calls, @@ -225,7 +232,7 @@ class AIMessage(BaseMessage): @property def content_blocks(self) -> list[types.ContentBlock]: - """Return content blocks of the message. + """Return standard, typed `ContentBlock` dicts from the message. If the message has a known model provider, use the provider-specific translator first before falling back to best-effort parsing. For details, see the property @@ -331,11 +338,10 @@ class AIMessage(BaseMessage): @override def pretty_repr(self, html: bool = False) -> str: - """Return a pretty representation of the message. + """Return a pretty representation of the message for display. Args: html: Whether to return an HTML-formatted string. - Defaults to `False`. Returns: A pretty representation of the message. @@ -372,23 +378,19 @@ class AIMessage(BaseMessage): class AIMessageChunk(AIMessage, BaseMessageChunk): - """Message chunk from an AI.""" + """Message chunk from an AI (yielded when streaming).""" # Ignoring mypy re-assignment here since we're overriding the value # to make sure that the chunk variant can be discriminated from the # non-chunk variant. type: Literal["AIMessageChunk"] = "AIMessageChunk" # type: ignore[assignment] - """The type of the message (used for deserialization). - - Defaults to `AIMessageChunk`. - - """ + """The type of the message (used for deserialization).""" tool_call_chunks: list[ToolCallChunk] = [] """If provided, tool call chunks associated with the message.""" chunk_position: Literal["last"] | None = None - """Optional span represented by an aggregated AIMessageChunk. + """Optional span represented by an aggregated `AIMessageChunk`. 
If a chunk with `chunk_position="last"` is aggregated into a stream, `tool_call_chunks` in message content will be parsed into `tool_calls`. @@ -396,7 +398,7 @@ class AIMessageChunk(AIMessage, BaseMessageChunk): @property def lc_attributes(self) -> dict: - """Attrs to be serialized even if they are derived from other init args.""" + """Attributes to be serialized, even if they are derived from other initialization args.""" # noqa: E501 return { "tool_calls": self.tool_calls, "invalid_tool_calls": self.invalid_tool_calls, @@ -404,7 +406,7 @@ class AIMessageChunk(AIMessage, BaseMessageChunk): @property def content_blocks(self) -> list[types.ContentBlock]: - """Return content blocks of the message.""" + """Return standard, typed `ContentBlock` dicts from the message.""" if self.response_metadata.get("output_version") == "v1": return cast("list[types.ContentBlock]", self.content) @@ -545,12 +547,15 @@ class AIMessageChunk(AIMessage, BaseMessageChunk): and call_id in id_to_tc ): self.content[idx] = cast("dict[str, Any]", id_to_tc[call_id]) + if "extras" in block: + # mypy does not account for instance check for dict above + self.content[idx]["extras"] = block["extras"] # type: ignore[index] return self @model_validator(mode="after") def init_server_tool_calls(self) -> Self: - """Parse server_tool_call_chunks.""" + """Parse `server_tool_call_chunks`.""" if ( self.chunk_position == "last" and self.response_metadata.get("output_version") == "v1" @@ -650,13 +655,13 @@ def add_ai_message_chunks( chunk_id = id_ break else: - # second pass: prefer lc_run-* ids over lc_* ids + # second pass: prefer lc_run-* IDs over lc_* IDs for id_ in candidates: if id_ and id_.startswith(LC_ID_PREFIX): chunk_id = id_ break else: - # third pass: take any remaining id (auto-generated lc_* ids) + # third pass: take any remaining ID (auto-generated lc_* IDs) for id_ in candidates: if id_: chunk_id = id_ diff --git a/libs/core/langchain_core/messages/base.py b/libs/core/langchain_core/messages/base.py index 0a735eb2829..05ce4e57fd1 100644 --- a/libs/core/langchain_core/messages/base.py +++ b/libs/core/langchain_core/messages/base.py @@ -92,11 +92,15 @@ class TextAccessor(str): class BaseMessage(Serializable): """Base abstract message class. - Messages are the inputs and outputs of a `ChatModel`. + Messages are the inputs and outputs of a chat model. + + Examples include [`HumanMessage`][langchain.messages.HumanMessage], + [`AIMessage`][langchain.messages.AIMessage], and + [`SystemMessage`][langchain.messages.SystemMessage]. """ content: str | list[str | dict] - """The string contents of the message.""" + """The contents of the message.""" additional_kwargs: dict = Field(default_factory=dict) """Reserved for additional payload data associated with the message. @@ -159,12 +163,12 @@ class BaseMessage(Serializable): content_blocks: list[types.ContentBlock] | None = None, **kwargs: Any, ) -> None: - """Initialize `BaseMessage`. + """Initialize a `BaseMessage`. Specify `content` as positional arg or `content_blocks` for typing. Args: - content: The string contents of the message. + content: The contents of the message. content_blocks: Typed standard content. **kwargs: Additional arguments to pass to the parent class. """ @@ -184,7 +188,7 @@ class BaseMessage(Serializable): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. 
Returns: `["langchain", "schema", "messages"]` @@ -195,7 +199,7 @@ class BaseMessage(Serializable): def content_blocks(self) -> list[types.ContentBlock]: r"""Load content blocks from the message content. - !!! version-added "Added in version 1.0.0" + !!! version-added "Added in `langchain-core` 1.0.0" """ # Needed here to avoid circular import, as these classes import BaseMessages @@ -262,7 +266,7 @@ class BaseMessage(Serializable): Can be used as both property (`message.text`) and method (`message.text()`). !!! deprecated - As of langchain-core 1.0.0, calling `.text()` as a method is deprecated. + As of `langchain-core` 1.0.0, calling `.text()` as a method is deprecated. Use `.text` as a property instead. This method will be removed in 2.0.0. Returns: @@ -307,7 +311,7 @@ class BaseMessage(Serializable): Args: html: Whether to format the message as HTML. If `True`, the message will be - formatted with HTML tags. Default is False. + formatted with HTML tags. Returns: A pretty representation of the message. @@ -464,7 +468,7 @@ def get_msg_title_repr(title: str, *, bold: bool = False) -> str: Args: title: The title. - bold: Whether to bold the title. Default is False. + bold: Whether to bold the title. Returns: The title representation. diff --git a/libs/core/langchain_core/messages/block_translators/__init__.py b/libs/core/langchain_core/messages/block_translators/__init__.py index 11419fd5b81..9d0c67ea7d2 100644 --- a/libs/core/langchain_core/messages/block_translators/__init__.py +++ b/libs/core/langchain_core/messages/block_translators/__init__.py @@ -28,7 +28,7 @@ dictionary with two keys: - `'translate_content'`: Function to translate `AIMessage` content. - `'translate_content_chunk'`: Function to translate `AIMessageChunk` content. -When calling `.content_blocks` on an `AIMessage` or `AIMessageChunk`, if +When calling `content_blocks` on an `AIMessage` or `AIMessageChunk`, if `model_provider` is set in `response_metadata`, the corresponding translator functions will be used to parse the content into blocks. Otherwise, best-effort parsing in `BaseMessage` will be used. diff --git a/libs/core/langchain_core/messages/block_translators/anthropic.py b/libs/core/langchain_core/messages/block_translators/anthropic.py index 87f9df8a392..c5178be45b9 100644 --- a/libs/core/langchain_core/messages/block_translators/anthropic.py +++ b/libs/core/langchain_core/messages/block_translators/anthropic.py @@ -31,7 +31,7 @@ def _convert_to_v1_from_anthropic_input( ) -> list[types.ContentBlock]: """Convert Anthropic format blocks to v1 format. - During the `.content_blocks` parsing process, we wrap blocks not recognized as a v1 + During the `content_blocks` parsing process, we wrap blocks not recognized as a v1 block as a `'non_standard'` block with the original block stored in the `value` field. This function attempts to unpack those blocks and convert any blocks that might be Anthropic format to v1 ContentBlocks. diff --git a/libs/core/langchain_core/messages/block_translators/bedrock_converse.py b/libs/core/langchain_core/messages/block_translators/bedrock_converse.py index 6d5e517e49f..c44ef6ca535 100644 --- a/libs/core/langchain_core/messages/block_translators/bedrock_converse.py +++ b/libs/core/langchain_core/messages/block_translators/bedrock_converse.py @@ -35,7 +35,7 @@ def _convert_to_v1_from_converse_input( ) -> list[types.ContentBlock]: """Convert Bedrock Converse format blocks to v1 format. 
- During the `.content_blocks` parsing process, we wrap blocks not recognized as a v1 + During the `content_blocks` parsing process, we wrap blocks not recognized as a v1 block as a `'non_standard'` block with the original block stored in the `value` field. This function attempts to unpack those blocks and convert any blocks that might be Converse format to v1 ContentBlocks. diff --git a/libs/core/langchain_core/messages/block_translators/google_genai.py b/libs/core/langchain_core/messages/block_translators/google_genai.py index 8380a267a52..2a82f035c23 100644 --- a/libs/core/langchain_core/messages/block_translators/google_genai.py +++ b/libs/core/langchain_core/messages/block_translators/google_genai.py @@ -105,7 +105,7 @@ def _convert_to_v1_from_genai_input( Called when message isn't an `AIMessage` or `model_provider` isn't set on `response_metadata`. - During the `.content_blocks` parsing process, we wrap blocks not recognized as a v1 + During the `content_blocks` parsing process, we wrap blocks not recognized as a v1 block as a `'non_standard'` block with the original block stored in the `value` field. This function attempts to unpack those blocks and convert any blocks that might be GenAI format to v1 ContentBlocks. @@ -282,7 +282,7 @@ def _convert_to_v1_from_genai(message: AIMessage) -> list[types.ContentBlock]: standard content blocks for returning. Args: - message: The AIMessage or AIMessageChunk to convert. + message: The `AIMessage` or `AIMessageChunk` to convert. Returns: List of standard content blocks derived from the message content. @@ -368,7 +368,7 @@ def _convert_to_v1_from_genai(message: AIMessage) -> list[types.ContentBlock]: else: # Assume it's raw base64 without data URI try: - # Validate base64 and decode for mime type detection + # Validate base64 and decode for MIME type detection decoded_bytes = base64.b64decode(url, validate=True) image_url_b64_block = { @@ -379,7 +379,7 @@ def _convert_to_v1_from_genai(message: AIMessage) -> list[types.ContentBlock]: try: import filetype # type: ignore[import-not-found] # noqa: PLC0415 - # Guess mime type based on file bytes + # Guess MIME type based on file bytes mime_type = None kind = filetype.guess(decoded_bytes) if kind: @@ -453,10 +453,13 @@ def _convert_to_v1_from_genai(message: AIMessage) -> list[types.ContentBlock]: "status": status, # type: ignore[typeddict-item] "output": item.get("code_execution_result", ""), } + server_tool_result_block["extras"] = {"block_type": item_type} # Preserve original outcome in extras if outcome is not None: - server_tool_result_block["extras"] = {"outcome": outcome} + server_tool_result_block["extras"]["outcome"] = outcome converted_blocks.append(server_tool_result_block) + elif item_type == "text": + converted_blocks.append(cast("types.TextContentBlock", item)) else: # Unknown type, preserve as non-standard converted_blocks.append({"type": "non_standard", "value": item}) diff --git a/libs/core/langchain_core/messages/block_translators/google_vertexai.py b/libs/core/langchain_core/messages/block_translators/google_vertexai.py index f4e8f7ec0ab..016f146164e 100644 --- a/libs/core/langchain_core/messages/block_translators/google_vertexai.py +++ b/libs/core/langchain_core/messages/block_translators/google_vertexai.py @@ -1,37 +1,9 @@ """Derivations of standard content blocks from Google (VertexAI) content.""" -import warnings - -from langchain_core.messages import AIMessage, AIMessageChunk -from langchain_core.messages import content as types - -WARNED = False - - -def 
translate_content(message: AIMessage) -> list[types.ContentBlock]: # noqa: ARG001 - """Derive standard content blocks from a message with Google (VertexAI) content.""" - global WARNED # noqa: PLW0603 - if not WARNED: - warning_message = ( - "Content block standardization is not yet fully supported for Google " - "VertexAI." - ) - warnings.warn(warning_message, stacklevel=2) - WARNED = True - raise NotImplementedError - - -def translate_content_chunk(message: AIMessageChunk) -> list[types.ContentBlock]: # noqa: ARG001 - """Derive standard content blocks from a chunk with Google (VertexAI) content.""" - global WARNED # noqa: PLW0603 - if not WARNED: - warning_message = ( - "Content block standardization is not yet fully supported for Google " - "VertexAI." - ) - warnings.warn(warning_message, stacklevel=2) - WARNED = True - raise NotImplementedError +from langchain_core.messages.block_translators.google_genai import ( + translate_content, + translate_content_chunk, +) def _register_google_vertexai_translator() -> None: diff --git a/libs/core/langchain_core/messages/block_translators/groq.py b/libs/core/langchain_core/messages/block_translators/groq.py index 958de52280a..33f1921ddc8 100644 --- a/libs/core/langchain_core/messages/block_translators/groq.py +++ b/libs/core/langchain_core/messages/block_translators/groq.py @@ -1,39 +1,135 @@ """Derivations of standard content blocks from Groq content.""" -import warnings +import json +import re +from typing import Any from langchain_core.messages import AIMessage, AIMessageChunk from langchain_core.messages import content as types - -WARNED = False +from langchain_core.messages.base import _extract_reasoning_from_additional_kwargs -def translate_content(message: AIMessage) -> list[types.ContentBlock]: # noqa: ARG001 - """Derive standard content blocks from a message with Groq content.""" - global WARNED # noqa: PLW0603 - if not WARNED: - warning_message = ( - "Content block standardization is not yet fully supported for Groq." +def _populate_extras( + standard_block: types.ContentBlock, block: dict[str, Any], known_fields: set[str] +) -> types.ContentBlock: + """Mutate a block, populating extras.""" + if standard_block.get("type") == "non_standard": + return standard_block + + for key, value in block.items(): + if key not in known_fields: + if "extras" not in standard_block: + # Below type-ignores are because mypy thinks a non-standard block can + # get here, although we exclude them above. + standard_block["extras"] = {} # type: ignore[typeddict-unknown-key] + standard_block["extras"][key] = value # type: ignore[typeddict-item] + + return standard_block + + +def _parse_code_json(s: str) -> dict: + """Extract Python code from Groq built-in tool content. + + Extracts the value of the 'code' field from a string of the form: + {"code": some_arbitrary_text_with_unescaped_quotes} + + As Groq may not escape quotes in the executed tools, e.g.: + ``` + '{"code": "import math; print("The square root of 101 is: "); print(math.sqrt(101))"}' + ``` + """ # noqa: E501 + m = re.fullmatch(r'\s*\{\s*"code"\s*:\s*"(.*)"\s*\}\s*', s, flags=re.DOTALL) + if not m: + msg = ( + "Could not extract Python code from Groq tool arguments. " + "Expected a JSON object with a 'code' field." 
) - warnings.warn(warning_message, stacklevel=2) - WARNED = True - raise NotImplementedError + raise ValueError(msg) + return {"code": m.group(1)} -def translate_content_chunk(message: AIMessageChunk) -> list[types.ContentBlock]: # noqa: ARG001 - """Derive standard content blocks from a message chunk with Groq content.""" - global WARNED # noqa: PLW0603 - if not WARNED: - warning_message = ( - "Content block standardization is not yet fully supported for Groq." +def _convert_to_v1_from_groq(message: AIMessage) -> list[types.ContentBlock]: + """Convert groq message content to v1 format.""" + content_blocks: list[types.ContentBlock] = [] + + if reasoning_block := _extract_reasoning_from_additional_kwargs(message): + content_blocks.append(reasoning_block) + + if executed_tools := message.additional_kwargs.get("executed_tools"): + for idx, executed_tool in enumerate(executed_tools): + args: dict[str, Any] | None = None + if arguments := executed_tool.get("arguments"): + try: + args = json.loads(arguments) + except json.JSONDecodeError: + if executed_tool.get("type") == "python": + try: + args = _parse_code_json(arguments) + except ValueError: + continue + elif ( + executed_tool.get("type") == "function" + and executed_tool.get("name") == "python" + ): + # GPT-OSS + args = {"code": arguments} + else: + continue + if isinstance(args, dict): + name = "" + if executed_tool.get("type") == "search": + name = "web_search" + elif executed_tool.get("type") == "python" or ( + executed_tool.get("type") == "function" + and executed_tool.get("name") == "python" + ): + name = "code_interpreter" + server_tool_call: types.ServerToolCall = { + "type": "server_tool_call", + "name": name, + "id": str(idx), + "args": args, + } + content_blocks.append(server_tool_call) + if tool_output := executed_tool.get("output"): + tool_result: types.ServerToolResult = { + "type": "server_tool_result", + "tool_call_id": str(idx), + "output": tool_output, + "status": "success", + } + known_fields = {"type", "arguments", "index", "output"} + _populate_extras(tool_result, executed_tool, known_fields) + content_blocks.append(tool_result) + + if isinstance(message.content, str) and message.content: + content_blocks.append({"type": "text", "text": message.content}) + + for tool_call in message.tool_calls: + content_blocks.append( # noqa: PERF401 + { + "type": "tool_call", + "name": tool_call["name"], + "args": tool_call["args"], + "id": tool_call.get("id"), + } ) - warnings.warn(warning_message, stacklevel=2) - WARNED = True - raise NotImplementedError + + return content_blocks + + +def translate_content(message: AIMessage) -> list[types.ContentBlock]: + """Derive standard content blocks from a message with groq content.""" + return _convert_to_v1_from_groq(message) + + +def translate_content_chunk(message: AIMessageChunk) -> list[types.ContentBlock]: + """Derive standard content blocks from a message chunk with groq content.""" + return _convert_to_v1_from_groq(message) def _register_groq_translator() -> None: - """Register the Groq translator with the central registry. + """Register the groq translator with the central registry. Run automatically when the module is imported. 
""" diff --git a/libs/core/langchain_core/messages/block_translators/langchain_v0.py b/libs/core/langchain_core/messages/block_translators/langchain_v0.py index 2172bf6e829..f7cb03839e8 100644 --- a/libs/core/langchain_core/messages/block_translators/langchain_v0.py +++ b/libs/core/langchain_core/messages/block_translators/langchain_v0.py @@ -10,7 +10,7 @@ def _convert_v0_multimodal_input_to_v1( ) -> list[types.ContentBlock]: """Convert v0 multimodal blocks to v1 format. - During the `.content_blocks` parsing process, we wrap blocks not recognized as a v1 + During the `content_blocks` parsing process, we wrap blocks not recognized as a v1 block as a `'non_standard'` block with the original block stored in the `value` field. This function attempts to unpack those blocks and convert any v0 format blocks to v1 format. diff --git a/libs/core/langchain_core/messages/block_translators/openai.py b/libs/core/langchain_core/messages/block_translators/openai.py index 5d60ce025ac..f70c15feca3 100644 --- a/libs/core/langchain_core/messages/block_translators/openai.py +++ b/libs/core/langchain_core/messages/block_translators/openai.py @@ -155,7 +155,7 @@ def _convert_to_v1_from_chat_completions_input( ) -> list[types.ContentBlock]: """Convert OpenAI Chat Completions format blocks to v1 format. - During the `.content_blocks` parsing process, we wrap blocks not recognized as a v1 + During the `content_blocks` parsing process, we wrap blocks not recognized as a v1 block as a `'non_standard'` block with the original block stored in the `value` field. This function attempts to unpack those blocks and convert any blocks that might be OpenAI format to v1 ContentBlocks. diff --git a/libs/core/langchain_core/messages/chat.py b/libs/core/langchain_core/messages/chat.py index 2050dd7fa0b..6786efcacf4 100644 --- a/libs/core/langchain_core/messages/chat.py +++ b/libs/core/langchain_core/messages/chat.py @@ -19,7 +19,7 @@ class ChatMessage(BaseMessage): """The speaker / role of the Message.""" type: Literal["chat"] = "chat" - """The type of the message (used during serialization). Defaults to "chat".""" + """The type of the message (used during serialization).""" class ChatMessageChunk(ChatMessage, BaseMessageChunk): @@ -29,11 +29,7 @@ class ChatMessageChunk(ChatMessage, BaseMessageChunk): # to make sure that the chunk variant can be discriminated from the # non-chunk variant. type: Literal["ChatMessageChunk"] = "ChatMessageChunk" # type: ignore[assignment] - """The type of the message (used during serialization). - - Defaults to `'ChatMessageChunk'`. - - """ + """The type of the message (used during serialization).""" @override def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore[override] diff --git a/libs/core/langchain_core/messages/content.py b/libs/core/langchain_core/messages/content.py index 9f14baea9b7..568579bc203 100644 --- a/libs/core/langchain_core/messages/content.py +++ b/libs/core/langchain_core/messages/content.py @@ -143,7 +143,7 @@ class Citation(TypedDict): not the source text. This means that the indices are relative to the model's response, not the original document (as specified in the `url`). - !!! note + !!! note "Factory function" `create_citation` may also be used as a factory to create a `Citation`. Benefits include: @@ -156,7 +156,9 @@ class Citation(TypedDict): """Type of the content block. Used for discrimination.""" id: NotRequired[str] - """Content block identifier. Either: + """Content block identifier. 
+ + Either: - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -201,6 +203,7 @@ class NonStandardAnnotation(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -211,6 +214,7 @@ class NonStandardAnnotation(TypedDict): Annotation = Citation | NonStandardAnnotation +"""A union of all defined `Annotation` types.""" class TextContentBlock(TypedDict): @@ -219,7 +223,7 @@ class TextContentBlock(TypedDict): This typically represents the main text content of a message, such as the response from a language model or the text of a user message. - !!! note + !!! note "Factory function" `create_text_block` may also be used as a factory to create a `TextContentBlock`. Benefits include: @@ -235,6 +239,7 @@ class TextContentBlock(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -254,7 +259,7 @@ class TextContentBlock(TypedDict): class ToolCall(TypedDict): - """Represents a request to call a tool. + """Represents an AI's request to call a tool. Example: ```python @@ -264,7 +269,7 @@ class ToolCall(TypedDict): This represents a request to call the tool named "foo" with arguments {"a": 1} and an identifier of "123". - !!! note + !!! note "Factory function" `create_tool_call` may also be used as a factory to create a `ToolCall`. Benefits include: @@ -299,7 +304,7 @@ class ToolCall(TypedDict): class ToolCallChunk(TypedDict): - """A chunk of a tool call (e.g., as part of a stream). + """A chunk of a tool call (yielded when streaming). When merging `ToolCallChunks` (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -381,7 +386,10 @@ class InvalidToolCall(TypedDict): class ServerToolCall(TypedDict): - """Tool call that is executed server-side.""" + """Tool call that is executed server-side. + + For example: code execution, web search, etc. + """ type: Literal["server_tool_call"] """Used for discrimination.""" @@ -403,7 +411,7 @@ class ServerToolCall(TypedDict): class ServerToolCallChunk(TypedDict): - """A chunk of a tool call (as part of a stream).""" + """A chunk of a server-side tool call (yielded when streaming).""" type: Literal["server_tool_call_chunk"] """Used for discrimination.""" @@ -452,7 +460,7 @@ class ServerToolResult(TypedDict): class ReasoningContentBlock(TypedDict): """Reasoning output from a LLM. - !!! note + !!! note "Factory function" `create_reasoning_block` may also be used as a factory to create a `ReasoningContentBlock`. Benefits include: @@ -468,6 +476,7 @@ class ReasoningContentBlock(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -494,7 +503,7 @@ class ReasoningContentBlock(TypedDict): class ImageContentBlock(TypedDict): """Image data. - !!! note + !!! note "Factory function" `create_image_block` may also be used as a factory to create a `ImageContentBlock`. Benefits include: @@ -510,6 +519,7 @@ class ImageContentBlock(TypedDict): """Content block identifier. 
Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -541,7 +551,7 @@ class ImageContentBlock(TypedDict): class VideoContentBlock(TypedDict): """Video data. - !!! note + !!! note "Factory function" `create_video_block` may also be used as a factory to create a `VideoContentBlock`. Benefits include: @@ -557,6 +567,7 @@ class VideoContentBlock(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -588,7 +599,7 @@ class VideoContentBlock(TypedDict): class AudioContentBlock(TypedDict): """Audio data. - !!! note + !!! note "Factory function" `create_audio_block` may also be used as a factory to create an `AudioContentBlock`. Benefits include: * Automatic ID generation (when not provided) @@ -603,6 +614,7 @@ class AudioContentBlock(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -632,7 +644,7 @@ class AudioContentBlock(TypedDict): class PlainTextContentBlock(TypedDict): - """Plaintext data (e.g., from a document). + """Plaintext data (e.g., from a `.txt` or `.md` document). !!! note A `PlainTextContentBlock` existed in `langchain-core<1.0.0`. Although the @@ -642,9 +654,9 @@ class PlainTextContentBlock(TypedDict): !!! note Title and context are optional fields that may be passed to the model. See - Anthropic [example](https://docs.anthropic.com/en/docs/build-with-claude/citations#citable-vs-non-citable-content). + Anthropic [example](https://docs.claude.com/en/docs/build-with-claude/citations#citable-vs-non-citable-content). - !!! note + !!! note "Factory function" `create_plaintext_block` may also be used as a factory to create a `PlainTextContentBlock`. Benefits include: @@ -660,6 +672,7 @@ class PlainTextContentBlock(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -694,7 +707,7 @@ class PlainTextContentBlock(TypedDict): class FileContentBlock(TypedDict): - """File data that doesn't fit into other multimodal blocks. + """File data that doesn't fit into other multimodal block types. This block is intended for files that are not images, audio, or plaintext. For example, it can be used for PDFs, Word documents, etc. @@ -703,7 +716,7 @@ class FileContentBlock(TypedDict): content block type (e.g., `ImageContentBlock`, `AudioContentBlock`, `PlainTextContentBlock`). - !!! note + !!! note "Factory function" `create_file_block` may also be used as a factory to create a `FileContentBlock`. Benefits include: @@ -719,6 +732,7 @@ class FileContentBlock(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) @@ -753,7 +767,7 @@ class FileContentBlock(TypedDict): class NonStandardContentBlock(TypedDict): - """Provider-specific data. + """Provider-specific content data. This block contains data for which there is not yet a standard type. @@ -765,7 +779,7 @@ class NonStandardContentBlock(TypedDict): Has no `extras` field, as provider-specific data should be included in the `value` field. - !!! note + !!! note "Factory function" `create_non_standard_block` may also be used as a factory to create a `NonStandardContentBlock`. 
Benefits include: @@ -781,13 +795,14 @@ class NonStandardContentBlock(TypedDict): """Content block identifier. Either: + - Generated by the provider (e.g., OpenAI's file ID) - Generated by LangChain upon creation (`UUID4` prefixed with `'lc_'`)) """ value: dict[str, Any] - """Provider-specific data.""" + """Provider-specific content data.""" index: NotRequired[int | str] """Index of block in aggregate response. Used during streaming.""" @@ -801,6 +816,7 @@ DataContentBlock = ( | PlainTextContentBlock | FileContentBlock ) +"""A union of all defined multimodal data `ContentBlock` types.""" ToolContentBlock = ( ToolCall | ToolCallChunk | ServerToolCall | ServerToolCallChunk | ServerToolResult @@ -814,6 +830,7 @@ ContentBlock = ( | DataContentBlock | ToolContentBlock ) +"""A union of all defined `ContentBlock` types and aliases.""" KNOWN_BLOCK_TYPES = { @@ -850,7 +867,7 @@ def _get_data_content_block_types() -> tuple[str, ...]: Example: ("image", "video", "audio", "text-plain", "file") Note that old style multimodal blocks type literals with new style blocks. - Speficially, "image", "audio", and "file". + Specifically, "image", "audio", and "file". See the docstring of `_normalize_messages` in `language_models._utils` for details. """ @@ -877,7 +894,7 @@ def is_data_content_block(block: dict) -> bool: block: The content block to check. Returns: - True if the content block is a data content block, False otherwise. + `True` if the content block is a data content block, `False` otherwise. """ if block.get("type") not in _get_data_content_block_types(): @@ -889,7 +906,7 @@ def is_data_content_block(block: dict) -> bool: # 'text' is checked to support v0 PlainTextContentBlock types # We must guard against new style TextContentBlock which also has 'text' `type` - # by ensuring the presense of `source_type` + # by ensuring the presence of `source_type` if block["type"] == "text" and "source_type" not in block: # noqa: SIM103 # This is more readable return False @@ -1382,7 +1399,7 @@ def create_non_standard_block( """Create a `NonStandardContentBlock`. Args: - value: Provider-specific data. + value: Provider-specific content data. id: Content block identifier. Generated automatically if not provided. index: Index of block in aggregate response. Used during streaming. diff --git a/libs/core/langchain_core/messages/function.py b/libs/core/langchain_core/messages/function.py index 2bd63e04e69..ee0dad3975f 100644 --- a/libs/core/langchain_core/messages/function.py +++ b/libs/core/langchain_core/messages/function.py @@ -19,7 +19,7 @@ class FunctionMessage(BaseMessage): do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. """ @@ -28,7 +28,7 @@ class FunctionMessage(BaseMessage): """The name of the function that was executed.""" type: Literal["function"] = "function" - """The type of the message (used for serialization). Defaults to `'function'`.""" + """The type of the message (used for serialization).""" class FunctionMessageChunk(FunctionMessage, BaseMessageChunk): @@ -38,11 +38,7 @@ class FunctionMessageChunk(FunctionMessage, BaseMessageChunk): # to make sure that the chunk variant can be discriminated from the # non-chunk variant. 
type: Literal["FunctionMessageChunk"] = "FunctionMessageChunk" # type: ignore[assignment] - """The type of the message (used for serialization). - - Defaults to `'FunctionMessageChunk'`. - - """ + """The type of the message (used for serialization).""" @override def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore[override] diff --git a/libs/core/langchain_core/messages/human.py b/libs/core/langchain_core/messages/human.py index d4626b6f415..338e2213700 100644 --- a/libs/core/langchain_core/messages/human.py +++ b/libs/core/langchain_core/messages/human.py @@ -7,9 +7,9 @@ from langchain_core.messages.base import BaseMessage, BaseMessageChunk class HumanMessage(BaseMessage): - """Message from a human. + """Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -27,11 +27,7 @@ class HumanMessage(BaseMessage): """ type: Literal["human"] = "human" - """The type of the message (used for serialization). - - Defaults to `'human'`. - - """ + """The type of the message (used for serialization).""" @overload def __init__( @@ -71,5 +67,4 @@ class HumanMessageChunk(HumanMessage, BaseMessageChunk): # to make sure that the chunk variant can be discriminated from the # non-chunk variant. type: Literal["HumanMessageChunk"] = "HumanMessageChunk" # type: ignore[assignment] - """The type of the message (used for serialization). - Defaults to "HumanMessageChunk".""" + """The type of the message (used for serialization).""" diff --git a/libs/core/langchain_core/messages/modifier.py b/libs/core/langchain_core/messages/modifier.py index 364af242ea5..2175be492e8 100644 --- a/libs/core/langchain_core/messages/modifier.py +++ b/libs/core/langchain_core/messages/modifier.py @@ -9,7 +9,7 @@ class RemoveMessage(BaseMessage): """Message responsible for deleting other messages.""" type: Literal["remove"] = "remove" - """The type of the message (used for serialization). Defaults to "remove".""" + """The type of the message (used for serialization).""" def __init__( self, diff --git a/libs/core/langchain_core/messages/system.py b/libs/core/langchain_core/messages/system.py index 789506ce5ce..4a60811dffc 100644 --- a/libs/core/langchain_core/messages/system.py +++ b/libs/core/langchain_core/messages/system.py @@ -27,11 +27,7 @@ class SystemMessage(BaseMessage): """ type: Literal["system"] = "system" - """The type of the message (used for serialization). - - Defaults to `'system'`. - - """ + """The type of the message (used for serialization).""" @overload def __init__( @@ -71,8 +67,4 @@ class SystemMessageChunk(SystemMessage, BaseMessageChunk): # to make sure that the chunk variant can be discriminated from the # non-chunk variant. type: Literal["SystemMessageChunk"] = "SystemMessageChunk" # type: ignore[assignment] - """The type of the message (used for serialization). - - Defaults to `'SystemMessageChunk'`. 
- - """ + """The type of the message (used for serialization).""" diff --git a/libs/core/langchain_core/messages/tool.py b/libs/core/langchain_core/messages/tool.py index 47128bf5e42..6993ad61c6a 100644 --- a/libs/core/langchain_core/messages/tool.py +++ b/libs/core/langchain_core/messages/tool.py @@ -31,36 +31,34 @@ class ToolMessage(BaseMessage, ToolOutputMixin): Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. """ @@ -69,11 +67,7 @@ class ToolMessage(BaseMessage, ToolOutputMixin): """Tool call that this message is responding to.""" type: Literal["tool"] = "tool" - """The type of the message (used for serialization). - - Defaults to `'tool'`. - - """ + """The type of the message (used for serialization).""" artifact: Any = None """Artifact of the Tool execution which is not meant to be sent to the model. @@ -82,21 +76,15 @@ class ToolMessage(BaseMessage, ToolOutputMixin): a subset of the full tool output is being passed as message content but the full output is needed in other parts of the code. - !!! version-added "Added in version 0.2.17" - """ status: Literal["success", "error"] = "success" - """Status of the tool invocation. - - !!! version-added "Added in version 0.2.24" - - """ + """Status of the tool invocation.""" additional_kwargs: dict = Field(default_factory=dict, repr=False) - """Currently inherited from BaseMessage, but not used.""" + """Currently inherited from `BaseMessage`, but not used.""" response_metadata: dict = Field(default_factory=dict, repr=False) - """Currently inherited from BaseMessage, but not used.""" + """Currently inherited from `BaseMessage`, but not used.""" @model_validator(mode="before") @classmethod @@ -164,12 +152,12 @@ class ToolMessage(BaseMessage, ToolOutputMixin): content_blocks: list[types.ContentBlock] | None = None, **kwargs: Any, ) -> None: - """Initialize `ToolMessage`. + """Initialize a `ToolMessage`. Specify `content` as positional arg or `content_blocks` for typing. 
Args: - content: The string contents of the message. + content: The contents of the message. content_blocks: Typed standard content. **kwargs: Additional fields. """ @@ -215,7 +203,7 @@ class ToolMessageChunk(ToolMessage, BaseMessageChunk): class ToolCall(TypedDict): - """Represents a request to call a tool. + """Represents an AI's request to call a tool. Example: ```python @@ -261,7 +249,7 @@ def tool_call( class ToolCallChunk(TypedDict): - """A chunk of a tool call (e.g., as part of a stream). + """A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their diff --git a/libs/core/langchain_core/messages/utils.py b/libs/core/langchain_core/messages/utils.py index 9b3643c0899..9c0c89a23cb 100644 --- a/libs/core/langchain_core/messages/utils.py +++ b/libs/core/langchain_core/messages/utils.py @@ -86,6 +86,7 @@ AnyMessage = Annotated[ | Annotated[ToolMessageChunk, Tag(tag="ToolMessageChunk")], Field(discriminator=Discriminator(_get_type)), ] +"""A type representing any defined `Message` or `MessageChunk` type.""" def get_buffer_string( @@ -96,9 +97,7 @@ def get_buffer_string( Args: messages: Messages to be converted to strings. human_prefix: The prefix to prepend to contents of `HumanMessage`s. - Default is `'Human'`. - ai_prefix: The prefix to prepend to contents of `AIMessage`. Default is - `'AI'`. + ai_prefix: The prefix to prepend to contents of `AIMessage`. Returns: A single string concatenation of all input messages. @@ -211,6 +210,7 @@ def message_chunk_to_message(chunk: BaseMessage) -> BaseMessage: MessageLikeRepresentation = ( BaseMessage | list[str] | tuple[str, str] | str | dict[str, Any] ) +"""A type representing the various ways a message can be represented.""" def _create_message_from_message_type( @@ -227,10 +227,10 @@ def _create_message_from_message_type( Args: message_type: (str) the type of the message (e.g., `'human'`, `'ai'`, etc.). content: (str) the content string. - name: (str) the name of the message. Default is None. - tool_call_id: (str) the tool call id. Default is None. - tool_calls: (list[dict[str, Any]]) the tool calls. Default is None. - id: (str) the id of the message. Default is None. + name: (str) the name of the message. + tool_call_id: (str) the tool call id. + tool_calls: (list[dict[str, Any]]) the tool calls. + id: (str) the id of the message. additional_kwargs: (dict[str, Any]) additional keyword arguments. Returns: @@ -319,7 +319,7 @@ def _convert_to_message(message: MessageLikeRepresentation) -> BaseMessage: message: a representation of a message in one of the supported formats. Returns: - an instance of a message or a message template. + An instance of a message or a message template. Raises: NotImplementedError: if the message type is not supported. 
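To make the conversion logic touched in the surrounding hunks concrete: the message-like formats described here (bare string, `(role, template)` 2-tuple, dict) are exercised most easily through the public `convert_to_messages` helper. A minimal sketch follows, assuming that helper continues to delegate to `_convert_to_message`; the message contents are illustrative only.

```python
# Minimal sketch of the message-like formats handled by the conversion
# logic above (string, 2-tuple, and dict representations), assuming the
# public convert_to_messages helper delegates to _convert_to_message.
from langchain_core.messages import convert_to_messages

messages = convert_to_messages(
    [
        "hi there",                           # bare string -> HumanMessage
        ("system", "You are terse."),         # (role, content) 2-tuple
        {"role": "ai", "content": "Hello!"},  # dict with role/content keys
    ]
)
for m in messages:
    print(type(m).__name__, m.content)
```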
@@ -328,12 +328,16 @@ def _convert_to_message(message: MessageLikeRepresentation) -> BaseMessage: """ if isinstance(message, BaseMessage): message_ = message - elif isinstance(message, str): - message_ = _create_message_from_message_type("human", message) - elif isinstance(message, Sequence) and len(message) == 2: - # mypy doesn't realise this can't be a string given the previous branch - message_type_str, template = message # type: ignore[misc] - message_ = _create_message_from_message_type(message_type_str, template) + elif isinstance(message, Sequence): + if isinstance(message, str): + message_ = _create_message_from_message_type("human", message) + else: + try: + message_type_str, template = message + except ValueError as e: + msg = "Message as a sequence must be (role string, template)" + raise NotImplementedError(msg) from e + message_ = _create_message_from_message_type(message_type_str, template) elif isinstance(message, dict): msg_kwargs = message.copy() try: @@ -425,22 +429,22 @@ def filter_messages( Args: messages: Sequence Message-like objects to filter. - include_names: Message names to include. Default is None. - exclude_names: Messages names to exclude. Default is None. + include_names: Message names to include. + exclude_names: Messages names to exclude. include_types: Message types to include. Can be specified as string names (e.g. `'system'`, `'human'`, `'ai'`, ...) or as `BaseMessage` classes (e.g. `SystemMessage`, `HumanMessage`, `AIMessage`, ...). - Default is None. + exclude_types: Message types to exclude. Can be specified as string names (e.g. `'system'`, `'human'`, `'ai'`, ...) or as `BaseMessage` classes (e.g. `SystemMessage`, `HumanMessage`, `AIMessage`, ...). - Default is None. - include_ids: Message IDs to include. Default is None. - exclude_ids: Message IDs to exclude. Default is None. - exclude_tool_calls: Tool call IDs to exclude. Default is None. + + include_ids: Message IDs to include. + exclude_ids: Message IDs to exclude. + exclude_tool_calls: Tool call IDs to exclude. Can be one of the following: - - `True`: all `AIMessage`s with tool calls and all - `ToolMessage` objects will be excluded. + - `True`: All `AIMessage` objects with tool calls and all `ToolMessage` + objects will be excluded. - a sequence of tool call IDs to exclude: - `ToolMessage` objects with the corresponding tool call ID will be excluded. @@ -568,7 +572,6 @@ def merge_message_runs( Args: messages: Sequence Message-like objects to merge. chunk_separator: Specify the string to be inserted between message chunks. - Defaults to `'\n'`. Returns: list of BaseMessages with consecutive runs of message types merged into single @@ -703,7 +706,7 @@ def trim_messages( r"""Trim messages to be below a token count. `trim_messages` can be used to reduce the size of a chat history to a specified - token count or specified message count. + token or message count. In either case, if passing the trimmed chat history back into a chat model directly, the resulting chat history should usually satisfy the following @@ -714,8 +717,6 @@ def trim_messages( followed by a `HumanMessage`. To achieve this, set `start_on='human'`. In addition, generally a `ToolMessage` can only appear after an `AIMessage` that involved a tool call. - Please see the following link for more information about messages: - https://python.langchain.com/docs/concepts/#messages 2. It includes recent messages and drops old messages in the chat history. To achieve this set the `strategy='last'`. 3. 
Usually, the new chat history should include the `SystemMessage` if it @@ -745,12 +746,10 @@ def trim_messages( strategy: Strategy for trimming. - `'first'`: Keep the first `<= n_count` tokens of the messages. - `'last'`: Keep the last `<= n_count` tokens of the messages. - Default is `'last'`. allow_partial: Whether to split a message if only part of the message can be included. If `strategy='last'` then the last partial contents of a message are included. If `strategy='first'` then the first partial contents of a message are included. - Default is False. end_on: The message type to end on. If specified then every message after the last occurrence of this type is ignored. If `strategy='last'` then this is done before we attempt to get the last `max_tokens`. If @@ -759,7 +758,7 @@ def trim_messages( `'human'`, `'ai'`, ...) or as `BaseMessage` classes (e.g. `SystemMessage`, `HumanMessage`, `AIMessage`, ...). Can be a single type or a list of types. - Default is None. + start_on: The message type to start on. Should only be specified if `strategy='last'`. If specified then every message before the first occurrence of this type is ignored. This is done after we trim @@ -768,10 +767,9 @@ def trim_messages( specified as string names (e.g. `'system'`, `'human'`, `'ai'`, ...) or as `BaseMessage` classes (e.g. `SystemMessage`, `HumanMessage`, `AIMessage`, ...). Can be a single type or a list of types. - Default is None. - include_system: Whether to keep the SystemMessage if there is one at index 0. - Should only be specified if `strategy="last"`. - Default is False. + + include_system: Whether to keep the `SystemMessage` if there is one at index + `0`. Should only be specified if `strategy="last"`. text_splitter: Function or `langchain_text_splitters.TextSplitter` for splitting the string contents of a message. Only used if `allow_partial=True`. If `strategy='last'` then the last split tokens @@ -782,7 +780,7 @@ def trim_messages( newlines. Returns: - list of trimmed `BaseMessage`. + List of trimmed `BaseMessage`. Raises: ValueError: if two incompatible arguments are specified or an unrecognized @@ -1031,18 +1029,18 @@ def convert_to_openai_messages( messages: Message-like object or iterable of objects whose contents are in OpenAI, Anthropic, Bedrock Converse, or VertexAI formats. text_format: How to format string or text block contents: - - `'string'`: - If a message has a string content, this is left as a string. If - a message has content blocks that are all of type `'text'`, these - are joined with a newline to make a single string. If a message has - content blocks and at least one isn't of type `'text'`, then - all blocks are left as dicts. - - `'block'`: - If a message has a string content, this is turned into a list - with a single content block of type `'text'`. If a message has - content blocks these are left as is. - include_id: Whether to include message ids in the openai messages, if they - are present in the source messages. + - `'string'`: + If a message has a string content, this is left as a string. If + a message has content blocks that are all of type `'text'`, these + are joined with a newline to make a single string. If a message has + content blocks and at least one isn't of type `'text'`, then + all blocks are left as dicts. + - `'block'`: + If a message has a string content, this is turned into a list + with a single content block of type `'text'`. If a message has + content blocks these are left as is. 
+ include_id: Whether to include message IDs in the openai messages, if they + are present in the source messages. Raises: ValueError: if an unrecognized `text_format` is specified, or if a message @@ -1103,7 +1101,7 @@ def convert_to_openai_messages( # ] ``` - !!! version-added "Added in version 0.3.11" + !!! version-added "Added in `langchain-core` 0.3.11" """ # noqa: E501 if text_format not in {"string", "block"}: @@ -1683,12 +1681,12 @@ def count_tokens_approximately( Args: messages: List of messages to count tokens for. chars_per_token: Number of characters per token to use for the approximation. - Default is 4 (one token corresponds to ~4 chars for common English text). - You can also specify float values for more fine-grained control. + One token corresponds to ~4 chars for common English text. + You can also specify `float` values for more fine-grained control. [See more here](https://platform.openai.com/tokenizer). - extra_tokens_per_message: Number of extra tokens to add per message. - Default is 3 (special tokens, including beginning/end of message). - You can also specify float values for more fine-grained control. + extra_tokens_per_message: Number of extra tokens to add per message, e.g. + special tokens, including beginning/end of message. + You can also specify `float` values for more fine-grained control. [See more here](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb). count_name: Whether to include message names in the count. Enabled by default. @@ -1703,7 +1701,7 @@ def count_tokens_approximately( Warning: This function does not currently support counting image tokens. - !!! version-added "Added in version 0.3.46" + !!! version-added "Added in `langchain-core` 0.3.46" """ token_count = 0.0 diff --git a/libs/core/langchain_core/output_parsers/__init__.py b/libs/core/langchain_core/output_parsers/__init__.py index 81c40b73a43..7bd9c0ca893 100644 --- a/libs/core/langchain_core/output_parsers/__init__.py +++ b/libs/core/langchain_core/output_parsers/__init__.py @@ -1,4 +1,20 @@ -"""**OutputParser** classes parse the output of an LLM call.""" +"""`OutputParser` classes parse the output of an LLM call into structured data. + +!!! tip "Structured output" + + Output parsers emerged as an early solution to the challenge of obtaining structured + output from LLMs. + + Today, most LLMs support [structured output](https://docs.langchain.com/oss/python/langchain/models#structured-outputs) + natively. In such cases, using output parsers may be unnecessary, and you should + leverage the model's built-in capabilities for structured output. Refer to the + [documentation of your chosen model](https://docs.langchain.com/oss/python/integrations/providers/overview) + for guidance on how to achieve structured output directly. + + Output parsers remain valuable when working with models that do not support + structured output natively, or when you require additional processing or validation + of the model's output beyond its inherent capabilities. 
+""" from typing import TYPE_CHECKING diff --git a/libs/core/langchain_core/output_parsers/base.py b/libs/core/langchain_core/output_parsers/base.py index 6786ef7c058..53f5240a96c 100644 --- a/libs/core/langchain_core/output_parsers/base.py +++ b/libs/core/langchain_core/output_parsers/base.py @@ -31,13 +31,13 @@ class BaseLLMOutputParser(ABC, Generic[T]): @abstractmethod def parse_result(self, result: list[Generation], *, partial: bool = False) -> T: - """Parse a list of candidate model Generations into a specific format. + """Parse a list of candidate model `Generation` objects into a specific format. Args: - result: A list of Generations to be parsed. The Generations are assumed - to be different candidate outputs for a single model input. + result: A list of `Generation` to be parsed. The `Generation` objects are + assumed to be different candidate outputs for a single model input. partial: Whether to parse the output as a partial result. This is useful - for parsers that can parse partial results. Default is False. + for parsers that can parse partial results. Returns: Structured output. @@ -46,17 +46,17 @@ class BaseLLMOutputParser(ABC, Generic[T]): async def aparse_result( self, result: list[Generation], *, partial: bool = False ) -> T: - """Async parse a list of candidate model Generations into a specific format. + """Async parse a list of candidate model `Generation` objects into a specific format. Args: - result: A list of Generations to be parsed. The Generations are assumed + result: A list of `Generation` to be parsed. The Generations are assumed to be different candidate outputs for a single model input. partial: Whether to parse the output as a partial result. This is useful - for parsers that can parse partial results. Default is False. + for parsers that can parse partial results. Returns: Structured output. - """ + """ # noqa: E501 return await run_in_executor(None, self.parse_result, result, partial=partial) @@ -135,6 +135,9 @@ class BaseOutputParser( Example: ```python + # Implement a simple boolean output parser + + class BooleanOutputParser(BaseOutputParser[bool]): true_val: str = "YES" false_val: str = "NO" @@ -172,7 +175,7 @@ class BaseOutputParser( This property is inferred from the first type argument of the class. Raises: - TypeError: If the class doesn't have an inferable OutputType. + TypeError: If the class doesn't have an inferable `OutputType`. """ for base in self.__class__.mro(): if hasattr(base, "__pydantic_generic_metadata__"): @@ -234,16 +237,16 @@ class BaseOutputParser( @override def parse_result(self, result: list[Generation], *, partial: bool = False) -> T: - """Parse a list of candidate model Generations into a specific format. + """Parse a list of candidate model `Generation` objects into a specific format. - The return value is parsed from only the first Generation in the result, which - is assumed to be the highest-likelihood Generation. + The return value is parsed from only the first `Generation` in the result, which + is assumed to be the highest-likelihood `Generation`. Args: - result: A list of Generations to be parsed. The Generations are assumed - to be different candidate outputs for a single model input. + result: A list of `Generation` to be parsed. The `Generation` objects are + assumed to be different candidate outputs for a single model input. partial: Whether to parse the output as a partial result. This is useful - for parsers that can parse partial results. Default is False. + for parsers that can parse partial results. 
Returns: Structured output. @@ -264,20 +267,20 @@ class BaseOutputParser( async def aparse_result( self, result: list[Generation], *, partial: bool = False ) -> T: - """Async parse a list of candidate model Generations into a specific format. + """Async parse a list of candidate model `Generation` objects into a specific format. - The return value is parsed from only the first Generation in the result, which - is assumed to be the highest-likelihood Generation. + The return value is parsed from only the first `Generation` in the result, which + is assumed to be the highest-likelihood `Generation`. Args: - result: A list of Generations to be parsed. The Generations are assumed - to be different candidate outputs for a single model input. + result: A list of `Generation` to be parsed. The `Generation` objects are + assumed to be different candidate outputs for a single model input. partial: Whether to parse the output as a partial result. This is useful - for parsers that can parse partial results. Default is False. + for parsers that can parse partial results. Returns: Structured output. - """ + """ # noqa: E501 return await run_in_executor(None, self.parse_result, result, partial=partial) async def aparse(self, text: str) -> T: @@ -299,13 +302,13 @@ class BaseOutputParser( ) -> Any: """Parse the output of an LLM call with the input prompt for context. - The prompt is largely provided in the event the OutputParser wants + The prompt is largely provided in the event the `OutputParser` wants to retry or fix the output in some way, and needs information from the prompt to do so. Args: completion: String output of a language model. - prompt: Input PromptValue. + prompt: Input `PromptValue`. Returns: Structured output. diff --git a/libs/core/langchain_core/output_parsers/format_instructions.py b/libs/core/langchain_core/output_parsers/format_instructions.py index 8ad789bcd66..49898917f45 100644 --- a/libs/core/langchain_core/output_parsers/format_instructions.py +++ b/libs/core/langchain_core/output_parsers/format_instructions.py @@ -1,11 +1,16 @@ """Format instructions.""" -JSON_FORMAT_INSTRUCTIONS = """The output should be formatted as a JSON instance that conforms to the JSON schema below. +JSON_FORMAT_INSTRUCTIONS = """STRICT OUTPUT FORMAT: +- Return only the JSON value that conforms to the schema. Do not include any additional text, explanations, headings, or separators. +- Do not wrap the JSON in Markdown or code fences (no ``` or ```json). +- Do not prepend or append any text (e.g., do not write "Here is the JSON:"). +- The response must be a single top-level JSON value exactly as required by the schema (object/array/etc.), with no trailing commas or comments. -As an example, for the schema {{"properties": {{"foo": {{"title": "Foo", "description": "a list of strings", "type": "array", "items": {{"type": "string"}}}}}}, "required": ["foo"]}} -the object {{"foo": ["bar", "baz"]}} is a well-formatted instance of the schema. The object {{"properties": {{"foo": ["bar", "baz"]}}}} is not well-formatted. +The output should be formatted as a JSON instance that conforms to the JSON schema below. -Here is the output schema: +As an example, for the schema {{"properties": {{"foo": {{"title": "Foo", "description": "a list of strings", "type": "array", "items": {{"type": "string"}}}}}}, "required": ["foo"]}} the object {{"foo": ["bar", "baz"]}} is a well-formatted instance of the schema. The object {{"properties": {{"foo": ["bar", "baz"]}}}} is not well-formatted. 
+ +Here is the output schema (shown in a code block for readability only β€” do not include any backticks or Markdown in your output): ``` {schema} ```""" # noqa: E501 diff --git a/libs/core/langchain_core/output_parsers/json.py b/libs/core/langchain_core/output_parsers/json.py index d665c33b48f..fc2b43ee01c 100644 --- a/libs/core/langchain_core/output_parsers/json.py +++ b/libs/core/langchain_core/output_parsers/json.py @@ -31,11 +31,14 @@ TBaseModel = TypeVar("TBaseModel", bound=PydanticBaseModel) class JsonOutputParser(BaseCumulativeTransformOutputParser[Any]): """Parse the output of an LLM call to a JSON object. + Probably the most reliable output parser for getting structured data that does *not* + use function calling. + When used in streaming mode, it will yield partial JSON objects containing all the keys that have been returned so far. - In streaming, if `diff` is set to `True`, yields JSONPatch operations - describing the difference between the previous and the current object. + In streaming, if `diff` is set to `True`, yields JSONPatch operations describing the + difference between the previous and the current object. """ pydantic_object: Annotated[type[TBaseModel] | None, SkipValidation()] = None # type: ignore[valid-type] @@ -62,7 +65,6 @@ class JsonOutputParser(BaseCumulativeTransformOutputParser[Any]): If `True`, the output will be a JSON object containing all the keys that have been returned so far. If `False`, the output will be the full JSON object. - Default is False. Returns: The parsed JSON object. diff --git a/libs/core/langchain_core/output_parsers/list.py b/libs/core/langchain_core/output_parsers/list.py index f2c087225c3..16b99b64f6a 100644 --- a/libs/core/langchain_core/output_parsers/list.py +++ b/libs/core/langchain_core/output_parsers/list.py @@ -41,7 +41,7 @@ def droplastn( class ListOutputParser(BaseTransformOutputParser[list[str]]): - """Parse the output of an LLM call to a list.""" + """Parse the output of a model to a list.""" @property def _type(self) -> str: @@ -74,30 +74,30 @@ class ListOutputParser(BaseTransformOutputParser[list[str]]): buffer = "" for chunk in input: if isinstance(chunk, BaseMessage): - # extract text + # Extract text chunk_content = chunk.content if not isinstance(chunk_content, str): continue buffer += chunk_content else: - # add current chunk to buffer + # Add current chunk to buffer buffer += chunk - # parse buffer into a list of parts + # Parse buffer into a list of parts try: done_idx = 0 - # yield only complete parts + # Yield only complete parts for m in droplastn(self.parse_iter(buffer), 1): done_idx = m.end() yield [m.group(1)] buffer = buffer[done_idx:] except NotImplementedError: parts = self.parse(buffer) - # yield only complete parts + # Yield only complete parts if len(parts) > 1: for part in parts[:-1]: yield [part] buffer = parts[-1] - # yield the last part + # Yield the last part for part in self.parse(buffer): yield [part] @@ -108,45 +108,45 @@ class ListOutputParser(BaseTransformOutputParser[list[str]]): buffer = "" async for chunk in input: if isinstance(chunk, BaseMessage): - # extract text + # Extract text chunk_content = chunk.content if not isinstance(chunk_content, str): continue buffer += chunk_content else: - # add current chunk to buffer + # Add current chunk to buffer buffer += chunk - # parse buffer into a list of parts + # Parse buffer into a list of parts try: done_idx = 0 - # yield only complete parts + # Yield only complete parts for m in droplastn(self.parse_iter(buffer), 1): done_idx = m.end() 
yield [m.group(1)] buffer = buffer[done_idx:] except NotImplementedError: parts = self.parse(buffer) - # yield only complete parts + # Yield only complete parts if len(parts) > 1: for part in parts[:-1]: yield [part] buffer = parts[-1] - # yield the last part + # Yield the last part for part in self.parse(buffer): yield [part] class CommaSeparatedListOutputParser(ListOutputParser): - """Parse the output of an LLM call to a comma-separated list.""" + """Parse the output of a model to a comma-separated list.""" @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "output_parsers", "list"]` @@ -177,7 +177,7 @@ class CommaSeparatedListOutputParser(ListOutputParser): ) return [item for sublist in reader for item in sublist] except csv.Error: - # keep old logic for backup + # Keep old logic for backup return [part.strip() for part in text.split(",")] @property diff --git a/libs/core/langchain_core/output_parsers/openai_functions.py b/libs/core/langchain_core/output_parsers/openai_functions.py index a971884621a..71e55d798d8 100644 --- a/libs/core/langchain_core/output_parsers/openai_functions.py +++ b/libs/core/langchain_core/output_parsers/openai_functions.py @@ -31,13 +31,13 @@ class OutputFunctionsParser(BaseGenerationOutputParser[Any]): Args: result: The result of the LLM call. - partial: Whether to parse partial JSON objects. Default is False. + partial: Whether to parse partial JSON objects. Returns: The parsed JSON object. Raises: - OutputParserException: If the output is not valid JSON. + `OutputParserException`: If the output is not valid JSON. """ generation = result[0] if not isinstance(generation, ChatGeneration): @@ -56,7 +56,7 @@ class OutputFunctionsParser(BaseGenerationOutputParser[Any]): class JsonOutputFunctionsParser(BaseCumulativeTransformOutputParser[Any]): - """Parse an output as the Json object.""" + """Parse an output as the JSON object.""" strict: bool = False """Whether to allow non-JSON-compliant strings. @@ -82,13 +82,13 @@ class JsonOutputFunctionsParser(BaseCumulativeTransformOutputParser[Any]): Args: result: The result of the LLM call. - partial: Whether to parse partial JSON objects. Default is False. + partial: Whether to parse partial JSON objects. Returns: The parsed JSON object. Raises: - OutputParserException: If the output is not valid JSON. + OutputParserExcept`ion: If the output is not valid JSON. """ if len(result) != 1: msg = f"Expected exactly one result, but got {len(result)}" @@ -155,7 +155,7 @@ class JsonOutputFunctionsParser(BaseCumulativeTransformOutputParser[Any]): class JsonKeyOutputFunctionsParser(JsonOutputFunctionsParser): - """Parse an output as the element of the Json object.""" + """Parse an output as the element of the JSON object.""" key_name: str """The name of the key to return.""" @@ -165,7 +165,7 @@ class JsonKeyOutputFunctionsParser(JsonOutputFunctionsParser): Args: result: The result of the LLM call. - partial: Whether to parse partial JSON objects. Default is False. + partial: Whether to parse partial JSON objects. Returns: The parsed JSON object. @@ -177,16 +177,15 @@ class JsonKeyOutputFunctionsParser(JsonOutputFunctionsParser): class PydanticOutputFunctionsParser(OutputFunctionsParser): - """Parse an output as a pydantic object. 
+ """Parse an output as a Pydantic object. - This parser is used to parse the output of a ChatModel that uses - OpenAI function format to invoke functions. + This parser is used to parse the output of a chat model that uses OpenAI function + format to invoke functions. - The parser extracts the function call invocation and matches - them to the pydantic schema provided. + The parser extracts the function call invocation and matches them to the Pydantic + schema provided. - An exception will be raised if the function call does not match - the provided schema. + An exception will be raised if the function call does not match the provided schema. Example: ```python @@ -221,7 +220,7 @@ class PydanticOutputFunctionsParser(OutputFunctionsParser): """ pydantic_schema: type[BaseModel] | dict[str, type[BaseModel]] - """The pydantic schema to parse the output with. + """The Pydantic schema to parse the output with. If multiple schemas are provided, then the function name will be used to determine which schema to use. @@ -230,7 +229,7 @@ class PydanticOutputFunctionsParser(OutputFunctionsParser): @model_validator(mode="before") @classmethod def validate_schema(cls, values: dict) -> Any: - """Validate the pydantic schema. + """Validate the Pydantic schema. Args: values: The values to validate. @@ -239,7 +238,7 @@ class PydanticOutputFunctionsParser(OutputFunctionsParser): The validated values. Raises: - ValueError: If the schema is not a pydantic schema. + ValueError: If the schema is not a Pydantic schema. """ schema = values["pydantic_schema"] if "args_only" not in values: @@ -262,10 +261,10 @@ class PydanticOutputFunctionsParser(OutputFunctionsParser): Args: result: The result of the LLM call. - partial: Whether to parse partial JSON objects. Default is False. + partial: Whether to parse partial JSON objects. Raises: - ValueError: If the pydantic schema is not valid. + ValueError: If the Pydantic schema is not valid. Returns: The parsed JSON object. @@ -288,13 +287,13 @@ class PydanticOutputFunctionsParser(OutputFunctionsParser): elif issubclass(pydantic_schema, BaseModelV1): pydantic_args = pydantic_schema.parse_raw(args) else: - msg = f"Unsupported pydantic schema: {pydantic_schema}" + msg = f"Unsupported Pydantic schema: {pydantic_schema}" raise ValueError(msg) return pydantic_args class PydanticAttrOutputFunctionsParser(PydanticOutputFunctionsParser): - """Parse an output as an attribute of a pydantic object.""" + """Parse an output as an attribute of a Pydantic object.""" attr_name: str """The name of the attribute to return.""" @@ -305,7 +304,7 @@ class PydanticAttrOutputFunctionsParser(PydanticOutputFunctionsParser): Args: result: The result of the LLM call. - partial: Whether to parse partial JSON objects. Default is False. + partial: Whether to parse partial JSON objects. Returns: The parsed JSON object. diff --git a/libs/core/langchain_core/output_parsers/openai_tools.py b/libs/core/langchain_core/output_parsers/openai_tools.py index 5f44b916ae2..23884abdfd3 100644 --- a/libs/core/langchain_core/output_parsers/openai_tools.py +++ b/libs/core/langchain_core/output_parsers/openai_tools.py @@ -31,10 +31,9 @@ def parse_tool_call( Args: raw_tool_call: The raw tool call to parse. - partial: Whether to parse partial JSON. Default is False. + partial: Whether to parse partial JSON. strict: Whether to allow non-JSON-compliant strings. - Default is False. - return_id: Whether to return the tool call id. Default is True. + return_id: Whether to return the tool call id. Returns: The parsed tool call. 
@@ -105,10 +104,9 @@ def parse_tool_calls( Args: raw_tool_calls: The raw tool calls to parse. - partial: Whether to parse partial JSON. Default is False. + partial: Whether to parse partial JSON. strict: Whether to allow non-JSON-compliant strings. - Default is False. - return_id: Whether to return the tool call id. Default is True. + return_id: Whether to return the tool call id. Returns: The parsed tool calls. @@ -165,7 +163,6 @@ class JsonOutputToolsParser(BaseCumulativeTransformOutputParser[Any]): If `True`, the output will be a JSON object containing all the keys that have been returned so far. If `False`, the output will be the full JSON object. - Default is False. Returns: The parsed tool calls. @@ -227,9 +224,8 @@ class JsonOutputKeyToolsParser(JsonOutputToolsParser): result: The result of the LLM call. partial: Whether to parse partial JSON. If `True`, the output will be a JSON object containing - all the keys that have been returned so far. + all the keys that have been returned so far. If `False`, the output will be the full JSON object. - Default is False. Raises: OutputParserException: If the generation is not a chat generation. @@ -311,9 +307,8 @@ class PydanticToolsParser(JsonOutputToolsParser): result: The result of the LLM call. partial: Whether to parse partial JSON. If `True`, the output will be a JSON object containing - all the keys that have been returned so far. + all the keys that have been returned so far. If `False`, the output will be the full JSON object. - Default is False. Returns: The parsed Pydantic objects. diff --git a/libs/core/langchain_core/output_parsers/pydantic.py b/libs/core/langchain_core/output_parsers/pydantic.py index 13eee343cd2..f9076a8158f 100644 --- a/libs/core/langchain_core/output_parsers/pydantic.py +++ b/libs/core/langchain_core/output_parsers/pydantic.py @@ -17,10 +17,10 @@ from langchain_core.utils.pydantic import ( class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]): - """Parse an output using a pydantic model.""" + """Parse an output using a Pydantic model.""" pydantic_object: Annotated[type[TBaseModel], SkipValidation()] - """The pydantic model to parse.""" + """The Pydantic model to parse.""" def _parse_obj(self, obj: dict) -> TBaseModel: try: @@ -45,21 +45,20 @@ class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]): def parse_result( self, result: list[Generation], *, partial: bool = False ) -> TBaseModel | None: - """Parse the result of an LLM call to a pydantic object. + """Parse the result of an LLM call to a Pydantic object. Args: result: The result of the LLM call. partial: Whether to parse partial JSON objects. If `True`, the output will be a JSON object containing all the keys that have been returned so far. - Defaults to `False`. Raises: - OutputParserException: If the result is not valid JSON - or does not conform to the pydantic model. + `OutputParserException`: If the result is not valid JSON + or does not conform to the Pydantic model. Returns: - The parsed pydantic object. + The parsed Pydantic object. """ try: json_object = super().parse_result(result) @@ -70,13 +69,13 @@ class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]): raise def parse(self, text: str) -> TBaseModel: - """Parse the output of an LLM call to a pydantic object. + """Parse the output of an LLM call to a Pydantic object. Args: text: The output of the LLM call. Returns: - The parsed pydantic object. + The parsed Pydantic object. 
""" return super().parse(text) @@ -87,7 +86,7 @@ class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]): The format instructions for the JSON output. """ # Copy schema to avoid altering original Pydantic schema. - schema = dict(self.pydantic_object.model_json_schema().items()) + schema = dict(self._get_schema(self.pydantic_object).items()) # Remove extraneous fields. reduced_schema = schema @@ -107,7 +106,7 @@ class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]): @property @override def OutputType(self) -> type[TBaseModel]: - """Return the pydantic model.""" + """Return the Pydantic model.""" return self.pydantic_object diff --git a/libs/core/langchain_core/output_parsers/string.py b/libs/core/langchain_core/output_parsers/string.py index 566daa25d82..4b189e1c467 100644 --- a/libs/core/langchain_core/output_parsers/string.py +++ b/libs/core/langchain_core/output_parsers/string.py @@ -6,20 +6,20 @@ from langchain_core.output_parsers.transform import BaseTransformOutputParser class StrOutputParser(BaseTransformOutputParser[str]): - """OutputParser that parses LLMResult into the top likely string.""" + """OutputParser that parses `LLMResult` into the top likely string.""" @classmethod def is_lc_serializable(cls) -> bool: - """StrOutputParser is serializable. + """`StrOutputParser` is serializable. Returns: - True + `True` """ return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "output_parser"]` diff --git a/libs/core/langchain_core/output_parsers/xml.py b/libs/core/langchain_core/output_parsers/xml.py index 718145ebb58..55e93542d7f 100644 --- a/libs/core/langchain_core/output_parsers/xml.py +++ b/libs/core/langchain_core/output_parsers/xml.py @@ -43,19 +43,19 @@ class _StreamingParser: """Streaming parser for XML. This implementation is pulled into a class to avoid implementation - drift between transform and atransform of the XMLOutputParser. + drift between transform and atransform of the `XMLOutputParser`. """ def __init__(self, parser: Literal["defusedxml", "xml"]) -> None: """Initialize the streaming parser. Args: - parser: Parser to use for XML parsing. Can be either 'defusedxml' or 'xml'. - See documentation in XMLOutputParser for more information. + parser: Parser to use for XML parsing. Can be either `'defusedxml'` or + `'xml'`. See documentation in `XMLOutputParser` for more information. Raises: - ImportError: If defusedxml is not installed and the defusedxml - parser is requested. + ImportError: If `defusedxml` is not installed and the `defusedxml` parser is + requested. """ if parser == "defusedxml": if not _HAS_DEFUSEDXML: @@ -79,10 +79,10 @@ class _StreamingParser: """Parse a chunk of text. Args: - chunk: A chunk of text to parse. This can be a string or a BaseMessage. + chunk: A chunk of text to parse. This can be a `str` or a `BaseMessage`. Yields: - A dictionary representing the parsed XML element. + A `dict` representing the parsed XML element. Raises: xml.etree.ElementTree.ParseError: If the XML is not well-formed. @@ -147,46 +147,49 @@ class _StreamingParser: class XMLOutputParser(BaseTransformOutputParser): - """Parse an output using xml format.""" + """Parse an output using xml format. + + Returns a dictionary of tags. + """ tags: list[str] | None = None """Tags to tell the LLM to expect in the XML output. Note this may not be perfect depending on the LLM implementation. 
- For example, with tags=["foo", "bar", "baz"]: + For example, with `tags=["foo", "bar", "baz"]`: 1. A well-formatted XML instance: - "\n \n \n \n" + `"\n \n \n \n"` 2. A badly-formatted XML instance (missing closing tag for 'bar'): - "\n \n " + `"\n \n "` 3. A badly-formatted XML instance (unexpected 'tag' element): - "\n \n \n" + `"\n \n \n"` """ encoding_matcher: re.Pattern = re.compile( r"<([^>]*encoding[^>]*)>\n(.*)", re.MULTILINE | re.DOTALL ) parser: Literal["defusedxml", "xml"] = "defusedxml" - """Parser to use for XML parsing. Can be either 'defusedxml' or 'xml'. + """Parser to use for XML parsing. Can be either `'defusedxml'` or `'xml'`. - * 'defusedxml' is the default parser and is used to prevent XML vulnerabilities - present in some distributions of Python's standard library xml. - `defusedxml` is a wrapper around the standard library parser that - sets up the parser with secure defaults. - * 'xml' is the standard library parser. + * `'defusedxml'` is the default parser and is used to prevent XML vulnerabilities + present in some distributions of Python's standard library xml. + `defusedxml` is a wrapper around the standard library parser that + sets up the parser with secure defaults. + * `'xml'` is the standard library parser. - Use `xml` only if you are sure that your distribution of the standard library - is not vulnerable to XML vulnerabilities. + Use `xml` only if you are sure that your distribution of the standard library is not + vulnerable to XML vulnerabilities. Please review the following resources for more information: * https://docs.python.org/3/library/xml.html#xml-vulnerabilities * https://github.com/tiran/defusedxml - The standard library relies on libexpat for parsing XML: - https://github.com/libexpat/libexpat + The standard library relies on [`libexpat`](https://github.com/libexpat/libexpat) + for parsing XML. """ def get_format_instructions(self) -> str: @@ -200,12 +203,12 @@ class XMLOutputParser(BaseTransformOutputParser): text: The output of an LLM call. Returns: - A dictionary representing the parsed XML. + A `dict` representing the parsed XML. Raises: OutputParserException: If the XML is not well-formed. - ImportError: If defusedxml is not installed and the defusedxml - parser is requested. + ImportError: If defus`edxml is not installed and the `defusedxml` parser is + requested. """ # Try to find XML string within triple backticks # Imports are temporarily placed here to avoid issue with caching on CI diff --git a/libs/core/langchain_core/outputs/generation.py b/libs/core/langchain_core/outputs/generation.py index 960563dac99..5fbd9c7e1b0 100644 --- a/libs/core/langchain_core/outputs/generation.py +++ b/libs/core/langchain_core/outputs/generation.py @@ -11,9 +11,8 @@ from langchain_core.utils._merge import merge_dicts class Generation(Serializable): """A single text generation output. - Generation represents the response from an - `"old-fashioned" LLM __` that - generates regular text (not chat messages). + Generation represents the response from an "old-fashioned" LLM (string-in, + string-out) that generates regular text (not chat messages). This model is used internally by chat model and will eventually be mapped to a more general `LLMResult` object, and then projected into @@ -21,8 +20,7 @@ class Generation(Serializable): LangChain users working with chat models will usually access information via `AIMessage` (returned from runnable interfaces) or `LLMResult` (available - via callbacks). 
Please refer the `AIMessage` and `LLMResult` schema documentation - for more information. + via callbacks). Please refer to `AIMessage` and `LLMResult` for more information. """ text: str @@ -35,16 +33,18 @@ class Generation(Serializable): """ type: Literal["Generation"] = "Generation" """Type is used exclusively for serialization purposes. - Set to "Generation" for this class.""" + + Set to "Generation" for this class. + """ @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "output"]` @@ -53,7 +53,7 @@ class Generation(Serializable): class GenerationChunk(Generation): - """Generation chunk, which can be concatenated with other Generation chunks.""" + """`GenerationChunk`, which can be concatenated with other Generation chunks.""" def __add__(self, other: GenerationChunk) -> GenerationChunk: """Concatenate two `GenerationChunk`s. diff --git a/libs/core/langchain_core/outputs/llm_result.py b/libs/core/langchain_core/outputs/llm_result.py index ddb12a77f6d..4eb3160f397 100644 --- a/libs/core/langchain_core/outputs/llm_result.py +++ b/libs/core/langchain_core/outputs/llm_result.py @@ -97,7 +97,7 @@ class LLMResult(BaseModel): other: Another `LLMResult` object to compare against. Returns: - True if the generations and `llm_output` are equal, False otherwise. + `True` if the generations and `llm_output` are equal, `False` otherwise. """ if not isinstance(other, LLMResult): return NotImplemented diff --git a/libs/core/langchain_core/prompt_values.py b/libs/core/langchain_core/prompt_values.py index 8380a3e067a..cb29070fa65 100644 --- a/libs/core/langchain_core/prompt_values.py +++ b/libs/core/langchain_core/prompt_values.py @@ -24,20 +24,18 @@ from langchain_core.messages import ( class PromptValue(Serializable, ABC): """Base abstract class for inputs to any language model. - PromptValues can be converted to both LLM (pure text-generation) inputs and - ChatModel inputs. + `PromptValues` can be converted to both LLM (pure text-generation) inputs and + chat model inputs. """ @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. - - This is used to determine the namespace of the object when serializing. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "prompt"]` @@ -50,7 +48,7 @@ class PromptValue(Serializable, ABC): @abstractmethod def to_messages(self) -> list[BaseMessage]: - """Return prompt as a list of Messages.""" + """Return prompt as a list of messages.""" class StringPromptValue(PromptValue): @@ -62,9 +60,7 @@ class StringPromptValue(PromptValue): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. - - This is used to determine the namespace of the object when serializing. + """Get the namespace of the LangChain object. Returns: `["langchain", "prompts", "base"]` @@ -99,9 +95,7 @@ class ChatPromptValue(PromptValue): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. - - This is used to determine the namespace of the object when serializing. 
+ """Get the namespace of the LangChain object. Returns: `["langchain", "prompts", "chat"]` @@ -113,11 +107,11 @@ class ImageURL(TypedDict, total=False): """Image URL.""" detail: Literal["auto", "low", "high"] - """Specifies the detail level of the image. Defaults to `'auto'`. + """Specifies the detail level of the image. + Can be `'auto'`, `'low'`, or `'high'`. This follows OpenAI's Chat Completion API's image URL format. - """ url: str diff --git a/libs/core/langchain_core/prompts/base.py b/libs/core/langchain_core/prompts/base.py index 43964fe0349..f4cd86fb5cc 100644 --- a/libs/core/langchain_core/prompts/base.py +++ b/libs/core/langchain_core/prompts/base.py @@ -46,21 +46,27 @@ class BasePromptTemplate( input_variables: list[str] """A list of the names of the variables whose values are required as inputs to the - prompt.""" + prompt. + """ optional_variables: list[str] = Field(default=[]) - """optional_variables: A list of the names of the variables for placeholder - or MessagePlaceholder that are optional. These variables are auto inferred - from the prompt and user need not provide them.""" + """A list of the names of the variables for placeholder or `MessagePlaceholder` that + are optional. + + These variables are auto inferred from the prompt and user need not provide them. + """ input_types: typing.Dict[str, Any] = Field(default_factory=dict, exclude=True) # noqa: UP006 """A dictionary of the types of the variables the prompt template expects. - If not provided, all variables are assumed to be strings.""" + + If not provided, all variables are assumed to be strings. + """ output_parser: BaseOutputParser | None = None """How to parse the output of calling an LLM on this formatted prompt.""" partial_variables: Mapping[str, Any] = Field(default_factory=dict) """A dictionary of the partial variables the prompt template carries. - Partial variables populate the template so that you don't need to - pass them in every time you call the prompt.""" + Partial variables populate the template so that you don't need to pass them in every + time you call the prompt. + """ metadata: typing.Dict[str, Any] | None = None # noqa: UP006 """Metadata to be used for tracing.""" tags: list[str] | None = None @@ -96,7 +102,7 @@ class BasePromptTemplate( @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "prompt_template"]` @@ -105,7 +111,7 @@ class BasePromptTemplate( @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True model_config = ConfigDict( @@ -127,7 +133,7 @@ class BasePromptTemplate( """Get the input schema for the prompt. Args: - config: configuration for the prompt. + config: Configuration for the prompt. Returns: The input schema for the prompt. @@ -195,8 +201,8 @@ class BasePromptTemplate( """Invoke the prompt. Args: - input: Dict, input to the prompt. - config: RunnableConfig, configuration for the prompt. + input: Input to the prompt. + config: Configuration for the prompt. Returns: The output of the prompt. @@ -221,8 +227,8 @@ class BasePromptTemplate( """Async invoke the prompt. Args: - input: Dict, input to the prompt. - config: RunnableConfig, configuration for the prompt. + input: Input to the prompt. + config: Configuration for the prompt. Returns: The output of the prompt. 
@@ -242,7 +248,7 @@ class BasePromptTemplate( @abstractmethod def format_prompt(self, **kwargs: Any) -> PromptValue: - """Create Prompt Value. + """Create `PromptValue`. Args: **kwargs: Any arguments to be passed to the prompt template. @@ -252,7 +258,7 @@ class BasePromptTemplate( """ async def aformat_prompt(self, **kwargs: Any) -> PromptValue: - """Async create Prompt Value. + """Async create `PromptValue`. Args: **kwargs: Any arguments to be passed to the prompt template. @@ -266,7 +272,7 @@ class BasePromptTemplate( """Return a partial of the prompt template. Args: - **kwargs: partial variables to set. + **kwargs: Partial variables to set. Returns: A partial of the prompt template. @@ -296,9 +302,9 @@ class BasePromptTemplate( A formatted string. Example: - ```python - prompt.format(variable1="foo") - ``` + ```python + prompt.format(variable1="foo") + ``` """ async def aformat(self, **kwargs: Any) -> FormatOutputType: @@ -311,9 +317,9 @@ class BasePromptTemplate( A formatted string. Example: - ```python - await prompt.aformat(variable1="foo") - ``` + ```python + await prompt.aformat(variable1="foo") + ``` """ return self.format(**kwargs) @@ -348,9 +354,9 @@ class BasePromptTemplate( NotImplementedError: If the prompt type is not implemented. Example: - ```python - prompt.save(file_path="path/prompt.yaml") - ``` + ```python + prompt.save(file_path="path/prompt.yaml") + ``` """ if self.partial_variables: msg = "Cannot save prompt with partial variables." @@ -402,23 +408,23 @@ def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str: First, this pulls information from the document from two sources: - 1. page_content: - This takes the information from the `document.page_content` - and assigns it to a variable named `page_content`. - 2. metadata: - This takes information from `document.metadata` and assigns - it to variables of the same name. + 1. `page_content`: + This takes the information from the `document.page_content` and assigns it to a + variable named `page_content`. + 2. `metadata`: + This takes information from `document.metadata` and assigns it to variables of + the same name. Those variables are then passed into the `prompt` to produce a formatted string. Args: - doc: Document, the page_content and metadata will be used to create + doc: `Document`, the `page_content` and `metadata` will be used to create the final string. - prompt: BasePromptTemplate, will be used to format the page_content - and metadata into the final string. + prompt: `BasePromptTemplate`, will be used to format the `page_content` + and `metadata` into the final string. Returns: - string of the document formatted. + String of the document formatted. Example: ```python @@ -429,7 +435,6 @@ def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str: prompt = PromptTemplate.from_template("Page {page}: {page_content}") format_document(doc, prompt) >>> "Page 1: This is a joke" - ``` """ return prompt.format(**_get_document_info(doc, prompt)) @@ -440,22 +445,22 @@ async def aformat_document(doc: Document, prompt: BasePromptTemplate[str]) -> st First, this pulls information from the document from two sources: - 1. page_content: - This takes the information from the `document.page_content` - and assigns it to a variable named `page_content`. - 2. metadata: - This takes information from `document.metadata` and assigns - it to variables of the same name. + 1. 
`page_content`: + This takes the information from the `document.page_content` and assigns it to a + variable named `page_content`. + 2. `metadata`: + This takes information from `document.metadata` and assigns it to variables of + the same name. Those variables are then passed into the `prompt` to produce a formatted string. Args: - doc: Document, the page_content and metadata will be used to create + doc: `Document`, the `page_content` and `metadata` will be used to create the final string. - prompt: BasePromptTemplate, will be used to format the page_content - and metadata into the final string. + prompt: `BasePromptTemplate`, will be used to format the `page_content` + and `metadata` into the final string. Returns: - string of the document formatted. + String of the document formatted. """ return await prompt.aformat(**_get_document_info(doc, prompt)) diff --git a/libs/core/langchain_core/prompts/chat.py b/libs/core/langchain_core/prompts/chat.py index 4c5d1f7810b..ac1d158e15d 100644 --- a/libs/core/langchain_core/prompts/chat.py +++ b/libs/core/langchain_core/prompts/chat.py @@ -135,7 +135,7 @@ class MessagesPlaceholder(BaseMessagePromptTemplate): n_messages: PositiveInt | None = None """Maximum number of messages to include. If `None`, then will include all. - Defaults to `None`.""" + """ def __init__( self, variable_name: str, *, optional: bool = False, **kwargs: Any @@ -147,7 +147,6 @@ class MessagesPlaceholder(BaseMessagePromptTemplate): optional: If `True` format_messages can be called with no arguments and will return an empty list. If `False` then a named argument with name `variable_name` must be passed in, even if the value is an empty list. - Defaults to `False`.] """ # mypy can't detect the init which is defined in the parent class # b/c these are BaseModel classes. @@ -195,7 +194,7 @@ class MessagesPlaceholder(BaseMessagePromptTemplate): """Human-readable representation. Args: - html: Whether to format as HTML. Defaults to `False`. + html: Whether to format as HTML. Returns: Human-readable representation. @@ -235,13 +234,13 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC): Args: template: a template. - template_format: format of the template. Defaults to "f-string". + template_format: format of the template. partial_variables: A dictionary of variables that can be used to partially fill in the template. For example, if the template is `"{variable1} {variable2}"`, and `partial_variables` is `{"variable1": "foo"}`, then the final prompt will be `"foo {variable2}"`. - Defaults to `None`. + **kwargs: keyword arguments to pass to the constructor. Returns: @@ -330,7 +329,7 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC): """Human-readable representation. Args: - html: Whether to format as HTML. Defaults to `False`. + html: Whether to format as HTML. Returns: Human-readable representation. @@ -412,9 +411,9 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate): Args: template: a template. template_format: format of the template. - Options are: 'f-string', 'mustache', 'jinja2'. Defaults to "f-string". + Options are: 'f-string', 'mustache', 'jinja2'. partial_variables: A dictionary of variables that can be used too partially. - Defaults to `None`. + **kwargs: keyword arguments to pass to the constructor. Returns: @@ -637,7 +636,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate): """Human-readable representation. Args: - html: Whether to format as HTML. Defaults to `False`. 
+ html: Whether to format as HTML. Returns: Human-readable representation. @@ -750,7 +749,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC): """Human-readable representation. Args: - html: Whether to format as HTML. Defaults to `False`. + html: Whether to format as HTML. Returns: Human-readable representation. @@ -777,42 +776,36 @@ class ChatPromptTemplate(BaseChatPromptTemplate): Use to create flexible templated prompts for chat models. - Examples: - !!! warning "Behavior changed in 0.2.24" - You can pass any Message-like formats supported by - `ChatPromptTemplate.from_messages()` directly to `ChatPromptTemplate()` - init. + ```python + from langchain_core.prompts import ChatPromptTemplate - ```python - from langchain_core.prompts import ChatPromptTemplate + template = ChatPromptTemplate( + [ + ("system", "You are a helpful AI bot. Your name is {name}."), + ("human", "Hello, how are you doing?"), + ("ai", "I'm doing well, thanks!"), + ("human", "{user_input}"), + ] + ) - template = ChatPromptTemplate( - [ - ("system", "You are a helpful AI bot. Your name is {name}."), - ("human", "Hello, how are you doing?"), - ("ai", "I'm doing well, thanks!"), - ("human", "{user_input}"), - ] - ) + prompt_value = template.invoke( + { + "name": "Bob", + "user_input": "What is your name?", + } + ) + # Output: + # ChatPromptValue( + # messages=[ + # SystemMessage(content='You are a helpful AI bot. Your name is Bob.'), + # HumanMessage(content='Hello, how are you doing?'), + # AIMessage(content="I'm doing well, thanks!"), + # HumanMessage(content='What is your name?') + # ] + # ) + ``` - prompt_value = template.invoke( - { - "name": "Bob", - "user_input": "What is your name?", - } - ) - # Output: - # ChatPromptValue( - # messages=[ - # SystemMessage(content='You are a helpful AI bot. Your name is Bob.'), - # HumanMessage(content='Hello, how are you doing?'), - # AIMessage(content="I'm doing well, thanks!"), - # HumanMessage(content='What is your name?') - # ] - # ) - ``` - - Messages Placeholder: + !!! note "Messages Placeholder" ```python # In addition to Human/AI/Tool/Function messages, @@ -853,13 +846,12 @@ class ChatPromptTemplate(BaseChatPromptTemplate): # ) ``` - Single-variable template: + !!! note "Single-variable template" If your prompt has only a single input variable (i.e., 1 instance of "{variable_nams}"), and you invoke the template with a non-dict object, the prompt template will inject the provided argument into that variable location. - ```python from langchain_core.prompts import ChatPromptTemplate @@ -899,25 +891,35 @@ class ChatPromptTemplate(BaseChatPromptTemplate): """Create a chat prompt template from a variety of message formats. Args: - messages: sequence of message representations. + messages: Sequence of message representations. + A message can be represented using the following formats: - (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of - (message type, template); e.g., ("human", "{user_input}"), - (4) 2-tuple of (message class, template), (5) a string which is - shorthand for ("human", template); e.g., "{user_input}". - template_format: format of the template. Defaults to "f-string". + + 1. `BaseMessagePromptTemplate` + 2. `BaseMessage` + 3. 2-tuple of `(message type, template)`; e.g., + `("human", "{user_input}")` + 4. 2-tuple of `(message class, template)` + 5. A string which is shorthand for `("human", template)`; e.g., + `"{user_input}"` + template_format: Format of the template. 
input_variables: A list of the names of the variables whose values are required as inputs to the prompt. optional_variables: A list of the names of the variables for placeholder or MessagePlaceholder that are optional. + These variables are auto inferred from the prompt and user need not provide them. partial_variables: A dictionary of the partial variables the prompt - template carries. Partial variables populate the template so that you - don't need to pass them in every time you call the prompt. + template carries. + + Partial variables populate the template so that you don't need to pass + them in every time you call the prompt. validate_template: Whether to validate the template. input_types: A dictionary of the types of the variables the prompt template - expects. If not provided, all variables are assumed to be strings. + expects. + + If not provided, all variables are assumed to be strings. Examples: Instantiation from a list of message templates: @@ -971,7 +973,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "prompts", "chat"]` @@ -1122,13 +1124,18 @@ class ChatPromptTemplate(BaseChatPromptTemplate): ) ``` Args: - messages: sequence of message representations. + messages: Sequence of message representations. + A message can be represented using the following formats: - (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of - (message type, template); e.g., ("human", "{user_input}"), - (4) 2-tuple of (message class, template), (5) a string which is - shorthand for ("human", template); e.g., "{user_input}". - template_format: format of the template. Defaults to "f-string". + + 1. `BaseMessagePromptTemplate` + 2. `BaseMessage` + 3. 2-tuple of `(message type, template)`; e.g., + `("human", "{user_input}")` + 4. 2-tuple of `(message class, template)` + 5. A string which is shorthand for `("human", template)`; e.g., + `"{user_input}"` + template_format: format of the template. Returns: a chat prompt template. @@ -1239,7 +1246,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate): """Extend the chat template with a sequence of messages. Args: - messages: sequence of message representations to append. + messages: Sequence of message representations to append. """ self.messages.extend( [_convert_to_message_template(message) for message in messages] @@ -1287,7 +1294,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate): """Human-readable representation. Args: - html: Whether to format as HTML. Defaults to `False`. + html: Whether to format as HTML. Returns: Human-readable representation. @@ -1306,7 +1313,7 @@ def _create_template_from_message_type( Args: message_type: str the type of the message template (e.g., "human", "ai", etc.) template: str the template string. - template_format: format of the template. Defaults to "f-string". + template_format: format of the template. Returns: a message prompt template of the appropriate type. @@ -1336,11 +1343,25 @@ def _create_template_from_message_type( raise ValueError(msg) var_name = template[1:-1] message = MessagesPlaceholder(variable_name=var_name, optional=True) - elif len(template) == 2 and isinstance(template[1], bool): - var_name_wrapped, is_optional = template + else: + try: + var_name_wrapped, is_optional = template + except ValueError as e: + msg = ( + "Unexpected arguments for placeholder message type." 
+ " Expected either a single string variable name" + " or a list of [variable_name: str, is_optional: bool]." + f" Got: {template}" + ) + raise ValueError(msg) from e + + if not isinstance(is_optional, bool): + msg = f"Expected is_optional to be a boolean. Got: {is_optional}" + raise ValueError(msg) # noqa: TRY004 + if not isinstance(var_name_wrapped, str): msg = f"Expected variable name to be a string. Got: {var_name_wrapped}" - raise ValueError(msg) # noqa:TRY004 + raise ValueError(msg) # noqa: TRY004 if var_name_wrapped[0] != "{" or var_name_wrapped[-1] != "}": msg = ( f"Invalid placeholder template: {var_name_wrapped}." @@ -1350,14 +1371,6 @@ def _create_template_from_message_type( var_name = var_name_wrapped[1:-1] message = MessagesPlaceholder(variable_name=var_name, optional=is_optional) - else: - msg = ( - "Unexpected arguments for placeholder message type." - " Expected either a single string variable name" - " or a list of [variable_name: str, is_optional: bool]." - f" Got: {template}" - ) - raise ValueError(msg) else: msg = ( f"Unexpected message type: {message_type}. Use one of 'human'," @@ -1383,7 +1396,7 @@ def _convert_to_message_template( Args: message: a representation of a message in one of the supported formats. - template_format: format of the template. Defaults to "f-string". + template_format: format of the template. Returns: an instance of a message or a message template. @@ -1411,10 +1424,11 @@ def _convert_to_message_template( ) raise ValueError(msg) message = (message["role"], message["content"]) - if len(message) != 2: + try: + message_type_str, template = message + except ValueError as e: msg = f"Expected 2-tuple of (role, template), got {message}" - raise ValueError(msg) - message_type_str, template = message + raise ValueError(msg) from e if isinstance(message_type_str, str): message_ = _create_template_from_message_type( message_type_str, template, template_format=template_format diff --git a/libs/core/langchain_core/prompts/dict.py b/libs/core/langchain_core/prompts/dict.py index a8b0094f78e..1d6f76384ed 100644 --- a/libs/core/langchain_core/prompts/dict.py +++ b/libs/core/langchain_core/prompts/dict.py @@ -69,12 +69,12 @@ class DictPromptTemplate(RunnableSerializable[dict, dict]): @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain_core", "prompts", "dict"]` @@ -85,7 +85,7 @@ class DictPromptTemplate(RunnableSerializable[dict, dict]): """Human-readable representation. Args: - html: Whether to format as HTML. Defaults to `False`. + html: Whether to format as HTML. Returns: Human-readable representation. diff --git a/libs/core/langchain_core/prompts/few_shot_with_templates.py b/libs/core/langchain_core/prompts/few_shot_with_templates.py index 7d0997da9d5..0693de2205f 100644 --- a/libs/core/langchain_core/prompts/few_shot_with_templates.py +++ b/libs/core/langchain_core/prompts/few_shot_with_templates.py @@ -46,7 +46,7 @@ class FewShotPromptWithTemplates(StringPromptTemplate): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. 
Returns: `["langchain", "prompts", "few_shot_with_templates"]` diff --git a/libs/core/langchain_core/prompts/image.py b/libs/core/langchain_core/prompts/image.py index 26987576641..c650a032e73 100644 --- a/libs/core/langchain_core/prompts/image.py +++ b/libs/core/langchain_core/prompts/image.py @@ -49,7 +49,7 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "prompts", "image"]` diff --git a/libs/core/langchain_core/prompts/loading.py b/libs/core/langchain_core/prompts/loading.py index 0b9ec9eefa0..c1a95f63d36 100644 --- a/libs/core/langchain_core/prompts/loading.py +++ b/libs/core/langchain_core/prompts/loading.py @@ -139,7 +139,7 @@ def load_prompt(path: str | Path, encoding: str | None = None) -> BasePromptTemp Args: path: Path to the prompt file. - encoding: Encoding of the file. Defaults to `None`. + encoding: Encoding of the file. Returns: A PromptTemplate object. diff --git a/libs/core/langchain_core/prompts/message.py b/libs/core/langchain_core/prompts/message.py index d1938ff4d4c..bf52af49590 100644 --- a/libs/core/langchain_core/prompts/message.py +++ b/libs/core/langchain_core/prompts/message.py @@ -18,12 +18,12 @@ class BaseMessagePromptTemplate(Serializable, ABC): @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "prompts", "chat"]` @@ -32,13 +32,13 @@ class BaseMessagePromptTemplate(Serializable, ABC): @abstractmethod def format_messages(self, **kwargs: Any) -> list[BaseMessage]: - """Format messages from kwargs. Should return a list of BaseMessages. + """Format messages from kwargs. Should return a list of `BaseMessage` objects. Args: **kwargs: Keyword arguments to use for formatting. Returns: - List of BaseMessages. + List of `BaseMessage` objects. """ async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]: @@ -48,7 +48,7 @@ class BaseMessagePromptTemplate(Serializable, ABC): **kwargs: Keyword arguments to use for formatting. Returns: - List of BaseMessages. + List of `BaseMessage` objects. """ return self.format_messages(**kwargs) @@ -68,7 +68,7 @@ class BaseMessagePromptTemplate(Serializable, ABC): """Human-readable representation. Args: - html: Whether to format as HTML. Defaults to `False`. + html: Whether to format as HTML. Returns: Human-readable representation. diff --git a/libs/core/langchain_core/prompts/prompt.py b/libs/core/langchain_core/prompts/prompt.py index 59fdf0882d7..6d486dc3aea 100644 --- a/libs/core/langchain_core/prompts/prompt.py +++ b/libs/core/langchain_core/prompts/prompt.py @@ -66,7 +66,7 @@ class PromptTemplate(StringPromptTemplate): @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "prompts", "prompt"]` @@ -220,7 +220,7 @@ class PromptTemplate(StringPromptTemplate): example_separator: The separator to use in between examples. Defaults to two new line characters. prefix: String that should go before any examples. Generally includes - examples. Default to an empty string. + examples. Returns: The final prompt generated. 
@@ -275,13 +275,12 @@ class PromptTemplate(StringPromptTemplate): Args: template: The template to load. template_format: The format of the template. Use `jinja2` for jinja2, - `mustache` for mustache, and `f-string` for f-strings. - Defaults to `f-string`. + `mustache` for mustache, and `f-string` for f-strings. partial_variables: A dictionary of variables that can be used to partially - fill in the template. For example, if the template is - `"{variable1} {variable2}"`, and `partial_variables` is - `{"variable1": "foo"}`, then the final prompt will be - `"foo {variable2}"`. Defaults to `None`. + fill in the template. For example, if the template is + `"{variable1} {variable2}"`, and `partial_variables` is + `{"variable1": "foo"}`, then the final prompt will be + `"foo {variable2}"`. **kwargs: Any other arguments to pass to the prompt template. Returns: diff --git a/libs/core/langchain_core/prompts/string.py b/libs/core/langchain_core/prompts/string.py index 581cff8bf5e..c93615fdb30 100644 --- a/libs/core/langchain_core/prompts/string.py +++ b/libs/core/langchain_core/prompts/string.py @@ -4,7 +4,7 @@ from __future__ import annotations import warnings from abc import ABC -from collections.abc import Callable +from collections.abc import Callable, Sequence from string import Formatter from typing import Any, Literal @@ -122,13 +122,16 @@ def mustache_formatter(template: str, /, **kwargs: Any) -> str: def mustache_template_vars( template: str, ) -> set[str]: - """Get the variables from a mustache template. + """Get the top-level variables from a mustache template. + + For nested variables like `{{person.name}}`, only the top-level + key (`person`) is returned. Args: template: The template string. Returns: - The variables from the template. + The top-level variables from the template. """ variables: set[str] = set() section_depth = 0 @@ -149,9 +152,7 @@ def mustache_template_vars( Defs = dict[str, "Defs"] -def mustache_schema( - template: str, -) -> type[BaseModel]: +def mustache_schema(template: str) -> type[BaseModel]: """Get the variables from a mustache template. Args: @@ -175,6 +176,11 @@ def mustache_schema( fields[prefix] = False elif type_ in {"variable", "no escape"}: fields[prefix + tuple(key.split("."))] = True + + for fkey, fval in fields.items(): + fields[fkey] = fval and not any( + is_subsequence(fkey, k) for k in fields if k != fkey + ) defs: Defs = {} # None means leaf node while fields: field, is_leaf = fields.popitem() @@ -273,7 +279,7 @@ class StringPromptTemplate(BasePromptTemplate, ABC): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. 
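To make the top-level-variable behaviour above concrete, here is a small, non-authoritative sketch: with a mustache template, a nested key such as `{{person.name}}` surfaces only `person` as an input variable, and the value is supplied as a nested dict.

```python
from langchain_core.prompts import PromptTemplate

prompt = PromptTemplate.from_template(
    "Hello {{person.name}}, welcome to {{place}}!",
    template_format="mustache",
)

print(sorted(prompt.input_variables))
# ['person', 'place'] -- only the top-level keys are reported

print(prompt.format(person={"name": "Ada"}, place="the office"))
# Hello Ada, welcome to the office!
```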
Returns: `["langchain", "prompts", "base"]` @@ -327,3 +333,12 @@ class StringPromptTemplate(BasePromptTemplate, ABC): def pretty_print(self) -> None: """Print a pretty representation of the prompt.""" print(self.pretty_repr(html=is_interactive_env())) # noqa: T201 + + +def is_subsequence(child: Sequence, parent: Sequence) -> bool: + """Return True if child is subsequence of parent.""" + if len(child) == 0 or len(parent) == 0: + return False + if len(parent) < len(child): + return False + return all(child[i] == parent[i] for i in range(len(child))) diff --git a/libs/core/langchain_core/prompts/structured.py b/libs/core/langchain_core/prompts/structured.py index 158c90be170..33888691f79 100644 --- a/libs/core/langchain_core/prompts/structured.py +++ b/libs/core/langchain_core/prompts/structured.py @@ -63,13 +63,13 @@ class StructuredPrompt(ChatPromptTemplate): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. For example, if the class is `langchain.llms.openai.OpenAI`, then the namespace is `["langchain", "llms", "openai"]` Returns: - The namespace of the langchain object. + The namespace of the LangChain object. """ return cls.__module__.split(".") @@ -104,19 +104,23 @@ class StructuredPrompt(ChatPromptTemplate): ) ``` Args: - messages: sequence of message representations. + messages: Sequence of message representations. + A message can be represented using the following formats: - (1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of - (message type, template); e.g., ("human", "{user_input}"), - (4) 2-tuple of (message class, template), (5) a string which is - shorthand for ("human", template); e.g., "{user_input}" - schema: a dictionary representation of function call, or a Pydantic model. + + 1. `BaseMessagePromptTemplate` + 2. `BaseMessage` + 3. 2-tuple of `(message type, template)`; e.g., + `("human", "{user_input}")` + 4. 2-tuple of `(message class, template)` + 5. A string which is shorthand for `("human", template)`; e.g., + `"{user_input}"` + schema: A dictionary representation of function call, or a Pydantic model. **kwargs: Any additional kwargs to pass through to `ChatModel.with_structured_output(schema, **kwargs)`. Returns: - a structured prompt template - + A structured prompt template """ return cls(messages, schema, **kwargs) @@ -144,7 +148,7 @@ class StructuredPrompt(ChatPromptTemplate): Args: others: The language model to pipe the structured prompt to. - name: The name of the pipeline. Defaults to `None`. + name: The name of the pipeline. Returns: A RunnableSequence object. diff --git a/libs/core/langchain_core/pydantic_v1/__init__.py b/libs/core/langchain_core/pydantic_v1/__init__.py deleted file mode 100644 index 1f7c9cb8699..00000000000 --- a/libs/core/langchain_core/pydantic_v1/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Pydantic v1 compatibility shim.""" - -from importlib import metadata - -from pydantic.v1 import * # noqa: F403 - -from langchain_core._api.deprecation import warn_deprecated - -try: - _PYDANTIC_MAJOR_VERSION: int = int(metadata.version("pydantic").split(".")[0]) -except metadata.PackageNotFoundError: - _PYDANTIC_MAJOR_VERSION = 0 - -warn_deprecated( - "0.3.0", - removal="1.0.0", - alternative="pydantic.v1 or pydantic", - message=( - "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. " - "The langchain_core.pydantic_v1 module was a " - "compatibility shim for pydantic v1, and should no longer be used. 
" - "Please update the code to import from Pydantic directly.\n\n" - "For example, replace imports like: " - "`from langchain_core.pydantic_v1 import BaseModel`\n" - "with: `from pydantic import BaseModel`\n" - "or the v1 compatibility namespace if you are working in a code base " - "that has not been fully upgraded to pydantic 2 yet. " - "\tfrom pydantic.v1 import BaseModel\n" - ), -) diff --git a/libs/core/langchain_core/pydantic_v1/dataclasses.py b/libs/core/langchain_core/pydantic_v1/dataclasses.py deleted file mode 100644 index cdcdb77e3a0..00000000000 --- a/libs/core/langchain_core/pydantic_v1/dataclasses.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Pydantic v1 compatibility shim.""" - -from pydantic.v1.dataclasses import * # noqa: F403 - -from langchain_core._api import warn_deprecated - -warn_deprecated( - "0.3.0", - removal="1.0.0", - alternative="pydantic.v1 or pydantic", - message=( - "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. " - "The langchain_core.pydantic_v1 module was a " - "compatibility shim for pydantic v1, and should no longer be used. " - "Please update the code to import from Pydantic directly.\n\n" - "For example, replace imports like: " - "`from langchain_core.pydantic_v1 import BaseModel`\n" - "with: `from pydantic import BaseModel`\n" - "or the v1 compatibility namespace if you are working in a code base " - "that has not been fully upgraded to pydantic 2 yet. " - "\tfrom pydantic.v1 import BaseModel\n" - ), -) diff --git a/libs/core/langchain_core/pydantic_v1/main.py b/libs/core/langchain_core/pydantic_v1/main.py deleted file mode 100644 index 005ad4ed347..00000000000 --- a/libs/core/langchain_core/pydantic_v1/main.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Pydantic v1 compatibility shim.""" - -from pydantic.v1.main import * # noqa: F403 - -from langchain_core._api import warn_deprecated - -warn_deprecated( - "0.3.0", - removal="1.0.0", - alternative="pydantic.v1 or pydantic", - message=( - "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. " - "The langchain_core.pydantic_v1 module was a " - "compatibility shim for pydantic v1, and should no longer be used. " - "Please update the code to import from Pydantic directly.\n\n" - "For example, replace imports like: " - "`from langchain_core.pydantic_v1 import BaseModel`\n" - "with: `from pydantic import BaseModel`\n" - "or the v1 compatibility namespace if you are working in a code base " - "that has not been fully upgraded to pydantic 2 yet. " - "\tfrom pydantic.v1 import BaseModel\n" - ), -) diff --git a/libs/core/langchain_core/rate_limiters.py b/libs/core/langchain_core/rate_limiters.py index dd0753fbc41..986d3ff63b7 100644 --- a/libs/core/langchain_core/rate_limiters.py +++ b/libs/core/langchain_core/rate_limiters.py @@ -21,11 +21,8 @@ class BaseRateLimiter(abc.ABC): Current limitations: - Rate limiting information is not surfaced in tracing or callbacks. This means - that the total time it takes to invoke a chat model will encompass both - the time spent waiting for tokens and the time spent making the request. - - - !!! version-added "Added in version 0.2.24" + that the total time it takes to invoke a chat model will encompass both + the time spent waiting for tokens and the time spent making the request. """ @abc.abstractmethod @@ -33,18 +30,18 @@ class BaseRateLimiter(abc.ABC): """Attempt to acquire the necessary tokens for the rate limiter. This method blocks until the required tokens are available if `blocking` - is set to True. + is set to `True`. 
- If `blocking` is set to False, the method will immediately return the result + If `blocking` is set to `False`, the method will immediately return the result of the attempt to acquire the tokens. Args: blocking: If `True`, the method will block until the tokens are available. If `False`, the method will return immediately with the result of - the attempt. Defaults to `True`. + the attempt. Returns: - True if the tokens were successfully acquired, False otherwise. + `True` if the tokens were successfully acquired, `False` otherwise. """ @abc.abstractmethod @@ -52,18 +49,18 @@ class BaseRateLimiter(abc.ABC): """Attempt to acquire the necessary tokens for the rate limiter. This method blocks until the required tokens are available if `blocking` - is set to True. + is set to `True`. - If `blocking` is set to False, the method will immediately return the result + If `blocking` is set to `False`, the method will immediately return the result of the attempt to acquire the tokens. Args: blocking: If `True`, the method will block until the tokens are available. If `False`, the method will return immediately with the result of - the attempt. Defaults to `True`. + the attempt. Returns: - True if the tokens were successfully acquired, False otherwise. + `True` if the tokens were successfully acquired, `False` otherwise. """ @@ -84,7 +81,7 @@ class InMemoryRateLimiter(BaseRateLimiter): not enough tokens in the bucket, the request is blocked until there are enough tokens. - These *tokens* have NOTHING to do with LLM tokens. They are just + These tokens have nothing to do with LLM tokens. They are just a way to keep track of how many requests can be made at a given time. Current limitations: @@ -109,7 +106,7 @@ class InMemoryRateLimiter(BaseRateLimiter): from langchain_anthropic import ChatAnthropic model = ChatAnthropic( - model_name="claude-3-opus-20240229", rate_limiter=rate_limiter + model_name="claude-sonnet-4-5-20250929", rate_limiter=rate_limiter ) for _ in range(5): @@ -118,9 +115,6 @@ class InMemoryRateLimiter(BaseRateLimiter): toc = time.time() print(toc - tic) ``` - - !!! version-added "Added in version 0.2.24" - """ # noqa: E501 def __init__( @@ -132,7 +126,7 @@ class InMemoryRateLimiter(BaseRateLimiter): ) -> None: """A rate limiter based on a token bucket. - These *tokens* have NOTHING to do with LLM tokens. They are just + These tokens have nothing to do with LLM tokens. They are just a way to keep track of how many requests can be made at a given time. This rate limiter is designed to work in a threaded environment. @@ -145,11 +139,11 @@ class InMemoryRateLimiter(BaseRateLimiter): Args: requests_per_second: The number of tokens to add per second to the bucket. The tokens represent "credit" that can be used to make requests. - check_every_n_seconds: check whether the tokens are available + check_every_n_seconds: Check whether the tokens are available every this many seconds. Can be a float to represent fractions of a second. max_bucket_size: The maximum number of tokens that can be in the bucket. - Must be at least 1. Used to prevent bursts of requests. + Must be at least `1`. Used to prevent bursts of requests. """ # Number of requests that we can make per second. self.requests_per_second = requests_per_second @@ -199,18 +193,18 @@ class InMemoryRateLimiter(BaseRateLimiter): """Attempt to acquire a token from the rate limiter. This method blocks until the required tokens are available if `blocking` - is set to True. + is set to `True`. 
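A rough sketch of the `blocking` semantics described in this docstring, using the constructor parameters documented above (the values are arbitrary):

```python
import time

from langchain_core.rate_limiters import InMemoryRateLimiter

limiter = InMemoryRateLimiter(
    requests_per_second=0.5,   # one token every two seconds
    check_every_n_seconds=0.1,
    max_bucket_size=1,         # no bursting beyond a single token
)

print(limiter.acquire(blocking=False))  # returns immediately with True/False

start = time.time()
limiter.acquire()                       # blocks until a token is available
print(f"waited {time.time() - start:.1f}s")
```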
- If `blocking` is set to False, the method will immediately return the result + If `blocking` is set to `False`, the method will immediately return the result of the attempt to acquire the tokens. Args: blocking: If `True`, the method will block until the tokens are available. If `False`, the method will return immediately with the result of - the attempt. Defaults to `True`. + the attempt. Returns: - True if the tokens were successfully acquired, False otherwise. + `True` if the tokens were successfully acquired, `False` otherwise. """ if not blocking: return self._consume() @@ -223,18 +217,18 @@ class InMemoryRateLimiter(BaseRateLimiter): """Attempt to acquire a token from the rate limiter. Async version. This method blocks until the required tokens are available if `blocking` - is set to True. + is set to `True`. - If `blocking` is set to False, the method will immediately return the result + If `blocking` is set to `False`, the method will immediately return the result of the attempt to acquire the tokens. Args: blocking: If `True`, the method will block until the tokens are available. If `False`, the method will return immediately with the result of - the attempt. Defaults to `True`. + the attempt. Returns: - True if the tokens were successfully acquired, False otherwise. + `True` if the tokens were successfully acquired, `False` otherwise. """ if not blocking: return self._consume() diff --git a/libs/core/langchain_core/retrievers.py b/libs/core/langchain_core/retrievers.py index 74b9eb5cdb6..7be6df9a727 100644 --- a/libs/core/langchain_core/retrievers.py +++ b/libs/core/langchain_core/retrievers.py @@ -7,7 +7,6 @@ the backbone of a retriever, but there are other types of retrievers as well. from __future__ import annotations -import warnings from abc import ABC, abstractmethod from inspect import signature from typing import TYPE_CHECKING, Any @@ -15,8 +14,6 @@ from typing import TYPE_CHECKING, Any from pydantic import ConfigDict from typing_extensions import Self, TypedDict, override -from langchain_core._api import deprecated -from langchain_core.callbacks import Callbacks from langchain_core.callbacks.manager import AsyncCallbackManager, CallbackManager from langchain_core.documents import Document from langchain_core.runnables import ( @@ -53,25 +50,25 @@ class LangSmithRetrieverParams(TypedDict, total=False): class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): - """Abstract base class for a Document retrieval system. + """Abstract base class for a document retrieval system. A retrieval system is defined as something that can take string queries and return - the most 'relevant' Documents from some source. + the most 'relevant' documents from some source. Usage: - A retriever follows the standard Runnable interface, and should be used - via the standard Runnable methods of `invoke`, `ainvoke`, `batch`, `abatch`. + A retriever follows the standard `Runnable` interface, and should be used via the + standard `Runnable` methods of `invoke`, `ainvoke`, `batch`, `abatch`. Implementation: - When implementing a custom retriever, the class should implement - the `_get_relevant_documents` method to define the logic for retrieving documents. + When implementing a custom retriever, the class should implement the + `_get_relevant_documents` method to define the logic for retrieving documents. Optionally, an async native implementations can be provided by overriding the `_aget_relevant_documents` method. 
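As noted above, an async-native implementation can also override `_aget_relevant_documents`; the sketch below (with an invented `docs` field) shows both methods side by side, while the synchronous examples that follow illustrate the standard pattern.

```python
from langchain_core.callbacks import (
    AsyncCallbackManagerForRetrieverRun,
    CallbackManagerForRetrieverRun,
)
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever


class TinyRetriever(BaseRetriever):
    """Return documents whose text contains the query (illustrative only)."""

    docs: list[Document]

    def _get_relevant_documents(
        self, query: str, *, run_manager: CallbackManagerForRetrieverRun
    ) -> list[Document]:
        return [d for d in self.docs if query.lower() in d.page_content.lower()]

    async def _aget_relevant_documents(
        self, query: str, *, run_manager: AsyncCallbackManagerForRetrieverRun
    ) -> list[Document]:
        # Delegates to the sync path; a real async-native retriever would
        # await its own async client here instead.
        return self._get_relevant_documents(query, run_manager=run_manager.get_sync())


retriever = TinyRetriever(docs=[Document(page_content="LangChain retrievers")])
retriever.invoke("retrievers")            # sync
# await retriever.ainvoke("retrievers")   # async
```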
- Example: A retriever that returns the first 5 documents from a list of documents + !!! example "Retriever that returns the first 5 documents from a list of documents" ```python from langchain_core.documents import Document @@ -90,7 +87,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): return self.docs[:self.k] ``` - Example: A simple retriever based on a scikit-learn vectorizer + !!! example "Simple retriever based on a scikit-learn vectorizer" ```python from sklearn.metrics.pairwise import cosine_similarity @@ -121,16 +118,20 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): _new_arg_supported: bool = False _expects_other_args: bool = False tags: list[str] | None = None - """Optional list of tags associated with the retriever. Defaults to `None`. + """Optional list of tags associated with the retriever. + These tags will be associated with each call to this retriever, and passed as arguments to the handlers defined in `callbacks`. + You can use these to eg identify a specific instance of a retriever with its use case. """ metadata: dict[str, Any] | None = None - """Optional metadata associated with the retriever. Defaults to `None`. + """Optional metadata associated with the retriever. + This metadata will be associated with each call to this retriever, and passed as arguments to the handlers defined in `callbacks`. + You can use these to eg identify a specific instance of a retriever with its use case. """ @@ -138,35 +139,6 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): @override def __init_subclass__(cls, **kwargs: Any) -> None: super().__init_subclass__(**kwargs) - # Version upgrade for old retrievers that implemented the public - # methods directly. - if cls.get_relevant_documents != BaseRetriever.get_relevant_documents: - warnings.warn( - "Retrievers must implement abstract `_get_relevant_documents` method" - " instead of `get_relevant_documents`", - DeprecationWarning, - stacklevel=4, - ) - swap = cls.get_relevant_documents - cls.get_relevant_documents = ( # type: ignore[method-assign] - BaseRetriever.get_relevant_documents - ) - cls._get_relevant_documents = swap # type: ignore[method-assign] - if ( - hasattr(cls, "aget_relevant_documents") - and cls.aget_relevant_documents != BaseRetriever.aget_relevant_documents - ): - warnings.warn( - "Retrievers must implement abstract `_aget_relevant_documents` method" - " instead of `aget_relevant_documents`", - DeprecationWarning, - stacklevel=4, - ) - aswap = cls.aget_relevant_documents - cls.aget_relevant_documents = ( # type: ignore[method-assign] - BaseRetriever.aget_relevant_documents - ) - cls._aget_relevant_documents = aswap # type: ignore[method-assign] parameters = signature(cls._get_relevant_documents).parameters cls._new_arg_supported = parameters.get("run_manager") is not None if ( @@ -207,7 +179,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): Args: input: The query string. - config: Configuration for the retriever. Defaults to `None`. + config: Configuration for the retriever. **kwargs: Additional arguments to pass to the retriever. Returns: @@ -268,7 +240,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): Args: input: The query string. - config: Configuration for the retriever. Defaults to `None`. + config: Configuration for the retriever. **kwargs: Additional arguments to pass to the retriever. 
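Reusing the `TinyRetriever` from the earlier sketch, callbacks, tags, metadata, and a run name are passed through the `config` argument of `invoke`/`ainvoke` described above (the tag and metadata values are invented):

```python
from langchain_core.runnables import RunnableConfig

config: RunnableConfig = {
    "tags": ["demo"],
    "metadata": {"user_id": "123"},
    "run_name": "tiny-retriever-call",
}

docs = retriever.invoke("retrievers", config)
# docs = await retriever.ainvoke("retrievers", config)
```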
Returns: @@ -348,91 +320,3 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC): query, run_manager=run_manager.get_sync(), ) - - @deprecated(since="0.1.46", alternative="invoke", removal="1.0") - def get_relevant_documents( - self, - query: str, - *, - callbacks: Callbacks = None, - tags: list[str] | None = None, - metadata: dict[str, Any] | None = None, - run_name: str | None = None, - **kwargs: Any, - ) -> list[Document]: - """Retrieve documents relevant to a query. - - Users should favor using `.invoke` or `.batch` rather than - `get_relevant_documents directly`. - - Args: - query: string to find relevant documents for. - callbacks: Callback manager or list of callbacks. Defaults to `None`. - tags: Optional list of tags associated with the retriever. - These tags will be associated with each call to this retriever, - and passed as arguments to the handlers defined in `callbacks`. - Defaults to `None`. - metadata: Optional metadata associated with the retriever. - This metadata will be associated with each call to this retriever, - and passed as arguments to the handlers defined in `callbacks`. - Defaults to `None`. - run_name: Optional name for the run. Defaults to `None`. - **kwargs: Additional arguments to pass to the retriever. - - Returns: - List of relevant documents. - """ - config: RunnableConfig = {} - if callbacks: - config["callbacks"] = callbacks - if tags: - config["tags"] = tags - if metadata: - config["metadata"] = metadata - if run_name: - config["run_name"] = run_name - return self.invoke(query, config, **kwargs) - - @deprecated(since="0.1.46", alternative="ainvoke", removal="1.0") - async def aget_relevant_documents( - self, - query: str, - *, - callbacks: Callbacks = None, - tags: list[str] | None = None, - metadata: dict[str, Any] | None = None, - run_name: str | None = None, - **kwargs: Any, - ) -> list[Document]: - """Asynchronously get documents relevant to a query. - - Users should favor using `.ainvoke` or `.abatch` rather than - `aget_relevant_documents directly`. - - Args: - query: string to find relevant documents for. - callbacks: Callback manager or list of callbacks. - tags: Optional list of tags associated with the retriever. - These tags will be associated with each call to this retriever, - and passed as arguments to the handlers defined in `callbacks`. - Defaults to `None`. - metadata: Optional metadata associated with the retriever. - This metadata will be associated with each call to this retriever, - and passed as arguments to the handlers defined in `callbacks`. - Defaults to `None`. - run_name: Optional name for the run. Defaults to `None`. - **kwargs: Additional arguments to pass to the retriever. - - Returns: - List of relevant documents. - """ - config: RunnableConfig = {} - if callbacks: - config["callbacks"] = callbacks - if tags: - config["tags"] = tags - if metadata: - config["metadata"] = metadata - if run_name: - config["run_name"] = run_name - return await self.ainvoke(query, config, **kwargs) diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py index d5ca1d48c7e..71fb4978cc0 100644 --- a/libs/core/langchain_core/runnables/base.py +++ b/libs/core/langchain_core/runnables/base.py @@ -118,6 +118,8 @@ if TYPE_CHECKING: Other = TypeVar("Other") +_RUNNABLE_GENERIC_NUM_ARGS = 2 # Input and Output + class Runnable(ABC, Generic[Input, Output]): """A unit of work that can be invoked, batched, streamed, transformed and composed. 
@@ -147,11 +149,11 @@ class Runnable(ABC, Generic[Input, Output]): the `input_schema` property, the `output_schema` property and `config_schema` method. - LCEL and Composition - ==================== + Composition + =========== + + Runnable objects can be composed together to create chains in a declarative way. - The LangChain Expression Language (LCEL) is a declarative way to compose - `Runnable` objectsinto chains. Any chain constructed this way will automatically have sync, async, batch, and streaming support. @@ -235,21 +237,21 @@ class Runnable(ABC, Generic[Input, Output]): You can set the global debug flag to True to enable debug output for all chains: - ```python - from langchain_core.globals import set_debug + ```python + from langchain_core.globals import set_debug - set_debug(True) - ``` + set_debug(True) + ``` Alternatively, you can pass existing or custom callbacks to any given chain: - ```python - from langchain_core.tracers import ConsoleCallbackHandler + ```python + from langchain_core.tracers import ConsoleCallbackHandler - chain.invoke(..., config={"callbacks": [ConsoleCallbackHandler()]}) - ``` + chain.invoke(..., config={"callbacks": [ConsoleCallbackHandler()]}) + ``` - For a UI (and much more) checkout [LangSmith](https://docs.smith.langchain.com/). + For a UI (and much more) checkout [LangSmith](https://docs.langchain.com/langsmith/home). """ @@ -304,20 +306,23 @@ class Runnable(ABC, Generic[Input, Output]): TypeError: If the input type cannot be inferred. """ # First loop through all parent classes and if any of them is - # a pydantic model, we will pick up the generic parameterization + # a Pydantic model, we will pick up the generic parameterization # from that model via the __pydantic_generic_metadata__ attribute. for base in self.__class__.mro(): if hasattr(base, "__pydantic_generic_metadata__"): metadata = base.__pydantic_generic_metadata__ - if "args" in metadata and len(metadata["args"]) == 2: + if ( + "args" in metadata + and len(metadata["args"]) == _RUNNABLE_GENERIC_NUM_ARGS + ): return metadata["args"][0] - # If we didn't find a pydantic model in the parent classes, + # If we didn't find a Pydantic model in the parent classes, # then loop through __orig_bases__. This corresponds to # Runnables that are not pydantic models. for cls in self.__class__.__orig_bases__: # type: ignore[attr-defined] type_args = get_args(cls) - if type_args and len(type_args) == 2: + if type_args and len(type_args) == _RUNNABLE_GENERIC_NUM_ARGS: return type_args[0] msg = ( @@ -340,12 +345,15 @@ class Runnable(ABC, Generic[Input, Output]): for base in self.__class__.mro(): if hasattr(base, "__pydantic_generic_metadata__"): metadata = base.__pydantic_generic_metadata__ - if "args" in metadata and len(metadata["args"]) == 2: + if ( + "args" in metadata + and len(metadata["args"]) == _RUNNABLE_GENERIC_NUM_ARGS + ): return metadata["args"][1] for cls in self.__class__.__orig_bases__: # type: ignore[attr-defined] type_args = get_args(cls) - if type_args and len(type_args) == 2: + if type_args and len(type_args) == _RUNNABLE_GENERIC_NUM_ARGS: return type_args[1] msg = ( @@ -390,7 +398,7 @@ class Runnable(ABC, Generic[Input, Output]): self.get_name("Input"), root=root_type, # create model needs access to appropriate type annotations to be - # able to construct the pydantic model. + # able to construct the Pydantic model. # When we create the model, we pass information about the namespace # where the model is being created, so the type annotations can # be resolved correctly as well. 
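Both checks above look for exactly two generic arguments (`Input` and `Output`); as a hedged sketch, parameterizing a direct subclass as `Runnable[str, str]` is enough for the input and output types to be inferred:

```python
from typing import Any

from langchain_core.runnables import Runnable, RunnableConfig


class Shout(Runnable[str, str]):
    """Input/Output are read off the `Runnable[str, str]` parameterization."""

    def invoke(self, input: str, config: RunnableConfig | None = None, **kwargs: Any) -> str:
        return input.upper() + "!"


shout = Shout()
print(shout.InputType, shout.OutputType)  # <class 'str'> <class 'str'>
print(shout.invoke("hello"))              # HELLO!
print(shout.get_input_jsonschema())       # roughly {'title': 'ShoutInput', 'type': 'string'}
```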
@@ -424,7 +432,7 @@ class Runnable(ABC, Generic[Input, Output]): print(runnable.get_input_jsonschema()) ``` - !!! version-added "Added in version 0.3.0" + !!! version-added "Added in `langchain-core` 0.3.0" """ return self.get_input_schema(config).model_json_schema() @@ -433,7 +441,7 @@ class Runnable(ABC, Generic[Input, Output]): def output_schema(self) -> type[BaseModel]: """Output schema. - The type of output this `Runnable` produces specified as a pydantic model. + The type of output this `Runnable` produces specified as a Pydantic model. """ return self.get_output_schema() @@ -468,7 +476,7 @@ class Runnable(ABC, Generic[Input, Output]): self.get_name("Output"), root=root_type, # create model needs access to appropriate type annotations to be - # able to construct the pydantic model. + # able to construct the Pydantic model. # When we create the model, we pass information about the namespace # where the model is being created, so the type annotations can # be resolved correctly as well. @@ -502,7 +510,7 @@ class Runnable(ABC, Generic[Input, Output]): print(runnable.get_output_jsonschema()) ``` - !!! version-added "Added in version 0.3.0" + !!! version-added "Added in `langchain-core` 0.3.0" """ return self.get_output_schema(config).model_json_schema() @@ -566,7 +574,7 @@ class Runnable(ABC, Generic[Input, Output]): Returns: A JSON schema that represents the config of the `Runnable`. - !!! version-added "Added in version 0.3.0" + !!! version-added "Added in `langchain-core` 0.3.0" """ return self.config_schema(include=include).model_json_schema() @@ -766,7 +774,7 @@ class Runnable(ABC, Generic[Input, Output]): """Assigns new fields to the `dict` output of this `Runnable`. ```python - from langchain_community.llms.fake import FakeStreamingListLLM + from langchain_core.language_models.fake import FakeStreamingListLLM from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import SystemMessagePromptTemplate from langchain_core.runnables import Runnable @@ -776,11 +784,11 @@ class Runnable(ABC, Generic[Input, Output]): SystemMessagePromptTemplate.from_template("You are a nice assistant.") + "{question}" ) - llm = FakeStreamingListLLM(responses=["foo-lish"]) + model = FakeStreamingListLLM(responses=["foo-lish"]) - chain: Runnable = prompt | llm | {"str": StrOutputParser()} + chain: Runnable = prompt | model | {"str": StrOutputParser()} - chain_with_assign = chain.assign(hello=itemgetter("str") | llm) + chain_with_assign = chain.assign(hello=itemgetter("str") | model) print(chain_with_assign.input_schema.model_json_schema()) # {'title': 'PromptInput', 'type': 'object', 'properties': @@ -818,10 +826,12 @@ class Runnable(ABC, Generic[Input, Output]): Args: input: The input to the `Runnable`. config: A config to use when invoking the `Runnable`. + The config supports standard keys like `'tags'`, `'metadata'` for tracing purposes, `'max_concurrency'` for controlling how much work to - do in parallel, and other keys. Please refer to the `RunnableConfig` - for more details. Defaults to `None`. + do in parallel, and other keys. + + Please refer to `RunnableConfig` for more details. Returns: The output of the `Runnable`. @@ -838,10 +848,12 @@ class Runnable(ABC, Generic[Input, Output]): Args: input: The input to the `Runnable`. config: A config to use when invoking the `Runnable`. + The config supports standard keys like `'tags'`, `'metadata'` for tracing purposes, `'max_concurrency'` for controlling how much work to - do in parallel, and other keys. 
Please refer to the `RunnableConfig` - for more details. Defaults to `None`. + do in parallel, and other keys. + + Please refer to `RunnableConfig` for more details. Returns: The output of the `Runnable`. @@ -860,7 +872,7 @@ class Runnable(ABC, Generic[Input, Output]): The default implementation of batch works well for IO bound runnables. - Subclasses should override this method if they can batch more efficiently; + Subclasses must override this method if they can batch more efficiently; e.g., if the underlying `Runnable` uses an API which supports a batch mode. Args: @@ -868,10 +880,10 @@ class Runnable(ABC, Generic[Input, Output]): config: A config to use when invoking the `Runnable`. The config supports standard keys like `'tags'`, `'metadata'` for tracing purposes, `'max_concurrency'` for controlling how much work - to do in parallel, and other keys. Please refer to the - `RunnableConfig` for more details. Defaults to `None`. + to do in parallel, and other keys. + + Please refer to `RunnableConfig` for more details. return_exceptions: Whether to return exceptions instead of raising them. - Defaults to `False`. **kwargs: Additional keyword arguments to pass to the `Runnable`. Returns: @@ -933,12 +945,13 @@ class Runnable(ABC, Generic[Input, Output]): Args: inputs: A list of inputs to the `Runnable`. config: A config to use when invoking the `Runnable`. + The config supports standard keys like `'tags'`, `'metadata'` for tracing purposes, `'max_concurrency'` for controlling how much work to - do in parallel, and other keys. Please refer to the `RunnableConfig` - for more details. Defaults to `None`. + do in parallel, and other keys. + + Please refer to `RunnableConfig` for more details. return_exceptions: Whether to return exceptions instead of raising them. - Defaults to `False`. **kwargs: Additional keyword arguments to pass to the `Runnable`. Yields: @@ -994,18 +1007,19 @@ class Runnable(ABC, Generic[Input, Output]): The default implementation of `batch` works well for IO bound runnables. - Subclasses should override this method if they can batch more efficiently; + Subclasses must override this method if they can batch more efficiently; e.g., if the underlying `Runnable` uses an API which supports a batch mode. Args: inputs: A list of inputs to the `Runnable`. config: A config to use when invoking the `Runnable`. + The config supports standard keys like `'tags'`, `'metadata'` for tracing purposes, `'max_concurrency'` for controlling how much work to - do in parallel, and other keys. Please refer to the `RunnableConfig` - for more details. Defaults to `None`. + do in parallel, and other keys. + + Please refer to `RunnableConfig` for more details. return_exceptions: Whether to return exceptions instead of raising them. - Defaults to `False`. **kwargs: Additional keyword arguments to pass to the `Runnable`. Returns: @@ -1064,12 +1078,13 @@ class Runnable(ABC, Generic[Input, Output]): Args: inputs: A list of inputs to the `Runnable`. config: A config to use when invoking the `Runnable`. + The config supports standard keys like `'tags'`, `'metadata'` for tracing purposes, `'max_concurrency'` for controlling how much work to - do in parallel, and other keys. Please refer to the `RunnableConfig` - for more details. Defaults to `None`. + do in parallel, and other keys. + + Please refer to `RunnableConfig` for more details. return_exceptions: Whether to return exceptions instead of raising them. - Defaults to `False`. **kwargs: Additional keyword arguments to pass to the `Runnable`. 
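A short sketch of the batch APIs documented above, using `max_concurrency` from the config (the inputs and values are arbitrary):

```python
from langchain_core.runnables import RunnableLambda

runnable = RunnableLambda(lambda x: x * 2)

# batch: outputs come back in the same order as the inputs.
print(runnable.batch([1, 2, 3], config={"max_concurrency": 2}))  # [2, 4, 6]

# batch_as_completed: yields (index, output) pairs as each input finishes.
for idx, out in runnable.batch_as_completed([1, 2, 3]):
    print(idx, out)

# return_exceptions=True surfaces failures in place instead of raising.
flaky = RunnableLambda(lambda x: 1 / x)
print(flaky.batch([0, 1], return_exceptions=True))  # [ZeroDivisionError(...), 1.0]
```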
Yields: @@ -1116,11 +1131,11 @@ class Runnable(ABC, Generic[Input, Output]): ) -> Iterator[Output]: """Default implementation of `stream`, which calls `invoke`. - Subclasses should override this method if they support streaming output. + Subclasses must override this method if they support streaming output. Args: input: The input to the `Runnable`. - config: The config to use for the `Runnable`. Defaults to `None`. + config: The config to use for the `Runnable`. **kwargs: Additional keyword arguments to pass to the `Runnable`. Yields: @@ -1137,11 +1152,11 @@ class Runnable(ABC, Generic[Input, Output]): ) -> AsyncIterator[Output]: """Default implementation of `astream`, which calls `ainvoke`. - Subclasses should override this method if they support streaming output. + Subclasses must override this method if they support streaming output. Args: input: The input to the `Runnable`. - config: The config to use for the `Runnable`. Defaults to `None`. + config: The config to use for the `Runnable`. **kwargs: Additional keyword arguments to pass to the `Runnable`. Yields: @@ -1273,22 +1288,20 @@ class Runnable(ABC, Generic[Input, Output]): A `StreamEvent` is a dictionary with the following schema: - - `event`: **str** - Event names are of the format: + - `event`: Event names are of the format: `on_[runnable_type]_(start|stream|end)`. - - `name`: **str** - The name of the `Runnable` that generated the event. - - `run_id`: **str** - randomly generated ID associated with the given - execution of the `Runnable` that emitted the event. A child `Runnable` that gets - invoked as part of the execution of a parent `Runnable` is assigned its own - unique ID. - - `parent_ids`: **list[str]** - The IDs of the parent runnables that generated - the event. The root `Runnable` will have an empty list. The order of the parent - IDs is from the root to the immediate parent. Only available for v2 version of - the API. The v1 version of the API will return an empty list. - - `tags`: **list[str] | None** - The tags of the `Runnable` that generated - the event. - - `metadata`: **dict[str, Any] | None** - The metadata of the `Runnable` that - generated the event. - - `data`: **dict[str, Any]** + - `name`: The name of the `Runnable` that generated the event. + - `run_id`: Randomly generated ID associated with the given execution of the + `Runnable` that emitted the event. A child `Runnable` that gets invoked as + part of the execution of a parent `Runnable` is assigned its own unique ID. + - `parent_ids`: The IDs of the parent runnables that generated the event. The + root `Runnable` will have an empty list. The order of the parent IDs is from + the root to the immediate parent. Only available for v2 version of the API. + The v1 version of the API will return an empty list. + - `tags`: The tags of the `Runnable` that generated the event. + - `metadata`: The metadata of the `Runnable` that generated the event. + - `data`: The data associated with the event. The contents of this field + depend on the type of event. See the table below for more details. Below is a table that illustrates some events that might be emitted by various chains. Metadata fields have been omitted from the table for brevity. @@ -1297,39 +1310,23 @@ class Runnable(ABC, Generic[Input, Output]): !!! note This reference table is for the v2 version of the schema. 
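To make these event fields concrete, a minimal consumer might filter on `event["event"]`; the reference table that follows lists the per-event `data` payloads. The chain below is illustrative and assumes `model` is any chat model instance.

```python
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

# `model` is assumed to be a chat model created elsewhere.
chain = ChatPromptTemplate.from_template("Tell me a joke about {topic}") | model | StrOutputParser()


async def show_events(topic: str) -> None:
    async for event in chain.astream_events({"topic": topic}, version="v2"):
        if event["event"] == "on_chat_model_stream":
            # For this event type, data["chunk"] is an AIMessageChunk.
            print(event["data"]["chunk"].content, end="", flush=True)
        else:
            print(event["event"], event["name"])
```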
- +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | event | name | chunk | input | output | - +==========================+==================+=====================================+===================================================+=====================================================+ - | `on_chat_model_start` | [model name] | | `{"messages": [[SystemMessage, HumanMessage]]}` | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_chat_model_stream` | [model name] | `AIMessageChunk(content="hello")` | | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_chat_model_end` | [model name] | | `{"messages": [[SystemMessage, HumanMessage]]}` | `AIMessageChunk(content="hello world")` | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_llm_start` | [model name] | | `{'input': 'hello'}` | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_llm_stream` | [model name] | `'Hello' ` | | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_llm_end` | [model name] | | `'Hello human!'` | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_chain_start` | format_docs | | | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_chain_stream` | format_docs | `'hello world!, goodbye world!'` | | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_chain_end` | format_docs | | `[Document(...)]` | `'hello world!, goodbye world!'` | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_tool_start` | some_tool | | `{"x": 1, "y": "2"}` | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_tool_end` | some_tool | | | `{"x": 1, "y": "2"}` | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_retriever_start` | [retriever name] | | `{"query": "hello"}` | | - 
+--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_retriever_end` | [retriever name] | | `{"query": "hello"}` | `[Document(...), ..]` | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_prompt_start` | [template_name] | | `{"question": "hello"}` | | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ - | `on_prompt_end` | [template_name] | | `{"question": "hello"}` | `ChatPromptValue(messages: [SystemMessage, ...])` | - +--------------------------+------------------+-------------------------------------+---------------------------------------------------+-----------------------------------------------------+ + | event | name | chunk | input | output | + | ---------------------- | -------------------- | ----------------------------------- | ------------------------------------------------- | --------------------------------------------------- | + | `on_chat_model_start` | `'[model name]'` | | `{"messages": [[SystemMessage, HumanMessage]]}` | | + | `on_chat_model_stream` | `'[model name]'` | `AIMessageChunk(content="hello")` | | | + | `on_chat_model_end` | `'[model name]'` | | `{"messages": [[SystemMessage, HumanMessage]]}` | `AIMessageChunk(content="hello world")` | + | `on_llm_start` | `'[model name]'` | | `{'input': 'hello'}` | | + | `on_llm_stream` | `'[model name]'` | `'Hello' ` | | | + | `on_llm_end` | `'[model name]'` | | `'Hello human!'` | | + | `on_chain_start` | `'format_docs'` | | | | + | `on_chain_stream` | `'format_docs'` | `'hello world!, goodbye world!'` | | | + | `on_chain_end` | `'format_docs'` | | `[Document(...)]` | `'hello world!, goodbye world!'` | + | `on_tool_start` | `'some_tool'` | | `{"x": 1, "y": "2"}` | | + | `on_tool_end` | `'some_tool'` | | | `{"x": 1, "y": "2"}` | + | `on_retriever_start` | `'[retriever name]'` | | `{"query": "hello"}` | | + | `on_retriever_end` | `'[retriever name]'` | | `{"query": "hello"}` | `[Document(...), ..]` | + | `on_prompt_start` | `'[template_name]'` | | `{"question": "hello"}` | | + | `on_prompt_end` | `'[template_name]'` | | `{"question": "hello"}` | `ChatPromptValue(messages: [SystemMessage, ...])` | In addition to the standard events, users can also dispatch custom events (see example below). @@ -1337,13 +1334,10 @@ class Runnable(ABC, Generic[Input, Output]): A custom event has following format: - +-----------+------+-----------------------------------------------------------------------------------------------------------+ - | Attribute | Type | Description | - +===========+======+===========================================================================================================+ - | name | str | A user defined name for the event. | - +-----------+------+-----------------------------------------------------------------------------------------------------------+ - | data | Any | The data associated with the event. This can be anything, though we suggest making it JSON serializable. 
| - +-----------+------+-----------------------------------------------------------------------------------------------------------+ + | Attribute | Type | Description | + | ----------- | ------ | --------------------------------------------------------------------------------------------------------- | + | `name` | `str` | A user defined name for the event. | + | `data` | `Any` | The data associated with the event. This can be anything, though we suggest making it JSON serializable. | Here are declarations associated with the standard events shown above: @@ -1378,7 +1372,8 @@ class Runnable(ABC, Generic[Input, Output]): ).with_config({"run_name": "my_template", "tags": ["my_template"]}) ``` - Example: + For instance: + ```python from langchain_core.runnables import RunnableLambda @@ -1391,8 +1386,8 @@ class Runnable(ABC, Generic[Input, Output]): events = [event async for event in chain.astream_events("hello", version="v2")] - # will produce the following events (run_id, and parent_ids - # has been omitted for brevity): + # Will produce the following events + # (run_id, and parent_ids has been omitted for brevity): [ { "data": {"input": "hello"}, @@ -1447,7 +1442,7 @@ class Runnable(ABC, Generic[Input, Output]): async for event in slow_thing.astream_events("some_input", version="v2"): print(event) - `` + ``` Args: input: The input to the `Runnable`. @@ -1521,12 +1516,12 @@ class Runnable(ABC, Generic[Input, Output]): Default implementation of transform, which buffers input and calls `astream`. - Subclasses should override this method if they can start producing output while + Subclasses must override this method if they can start producing output while input is still being generated. Args: input: An iterator of inputs to the `Runnable`. - config: The config to use for the `Runnable`. Defaults to `None`. + config: The config to use for the `Runnable`. **kwargs: Additional keyword arguments to pass to the `Runnable`. Yields: @@ -1566,12 +1561,12 @@ class Runnable(ABC, Generic[Input, Output]): Default implementation of atransform, which buffers input and calls `astream`. - Subclasses should override this method if they can start producing output while + Subclasses must override this method if they can start producing output while input is still being generated. Args: input: An async iterator of inputs to the `Runnable`. - config: The config to use for the `Runnable`. Defaults to `None`. + config: The config to use for the `Runnable`. **kwargs: Additional keyword arguments to pass to the `Runnable`. Yields: @@ -1619,16 +1614,16 @@ class Runnable(ABC, Generic[Input, Output]): from langchain_ollama import ChatOllama from langchain_core.output_parsers import StrOutputParser - llm = ChatOllama(model="llama3.1") + model = ChatOllama(model="llama3.1") # Without bind - chain = llm | StrOutputParser() + chain = model | StrOutputParser() chain.invoke("Repeat quoted words exactly: 'One two three four five.'") # Output is 'One two three four five.' # With bind - chain = llm.bind(stop=["three"]) | StrOutputParser() + chain = model.bind(stop=["three"]) | StrOutputParser() chain.invoke("Repeat quoted words exactly: 'One two three four five.'") # Output is 'One two' @@ -1682,11 +1677,11 @@ class Runnable(ABC, Generic[Input, Output]): Args: on_start: Called before the `Runnable` starts running, with the `Run` - object. Defaults to `None`. + object. on_end: Called after the `Runnable` finishes running, with the `Run` - object. Defaults to `None`. + object. 
on_error: Called if the `Runnable` throws an error, with the `Run` - object. Defaults to `None`. + object. Returns: A new `Runnable` with the listeners bound. @@ -1750,11 +1745,11 @@ class Runnable(ABC, Generic[Input, Output]): Args: on_start: Called asynchronously before the `Runnable` starts running, - with the `Run` object. Defaults to `None`. + with the `Run` object. on_end: Called asynchronously after the `Runnable` finishes running, - with the `Run` object. Defaults to `None`. + with the `Run` object. on_error: Called asynchronously if the `Runnable` throws an error, - with the `Run` object. Defaults to `None`. + with the `Run` object. Returns: A new `Runnable` with the listeners bound. @@ -1766,46 +1761,52 @@ class Runnable(ABC, Generic[Input, Output]): import time import asyncio + def format_t(timestamp: float) -> str: return datetime.fromtimestamp(timestamp, tz=timezone.utc).isoformat() + async def test_runnable(time_to_sleep: int): print(f"Runnable[{time_to_sleep}s]: starts at {format_t(time.time())}") await asyncio.sleep(time_to_sleep) print(f"Runnable[{time_to_sleep}s]: ends at {format_t(time.time())}") + async def fn_start(run_obj: Runnable): print(f"on start callback starts at {format_t(time.time())}") await asyncio.sleep(3) print(f"on start callback ends at {format_t(time.time())}") + async def fn_end(run_obj: Runnable): print(f"on end callback starts at {format_t(time.time())}") await asyncio.sleep(2) print(f"on end callback ends at {format_t(time.time())}") + runnable = RunnableLambda(test_runnable).with_alisteners( - on_start=fn_start, - on_end=fn_end + on_start=fn_start, on_end=fn_end ) + + async def concurrent_runs(): await asyncio.gather(runnable.ainvoke(2), runnable.ainvoke(3)) - asyncio.run(concurrent_runs()) - Result: - on start callback starts at 2025-03-01T07:05:22.875378+00:00 - on start callback starts at 2025-03-01T07:05:22.875495+00:00 - on start callback ends at 2025-03-01T07:05:25.878862+00:00 - on start callback ends at 2025-03-01T07:05:25.878947+00:00 - Runnable[2s]: starts at 2025-03-01T07:05:25.879392+00:00 - Runnable[3s]: starts at 2025-03-01T07:05:25.879804+00:00 - Runnable[2s]: ends at 2025-03-01T07:05:27.881998+00:00 - on end callback starts at 2025-03-01T07:05:27.882360+00:00 - Runnable[3s]: ends at 2025-03-01T07:05:28.881737+00:00 - on end callback starts at 2025-03-01T07:05:28.882428+00:00 - on end callback ends at 2025-03-01T07:05:29.883893+00:00 - on end callback ends at 2025-03-01T07:05:30.884831+00:00 + asyncio.run(concurrent_runs()) + # Result: + # on start callback starts at 2025-03-01T07:05:22.875378+00:00 + # on start callback starts at 2025-03-01T07:05:22.875495+00:00 + # on start callback ends at 2025-03-01T07:05:25.878862+00:00 + # on start callback ends at 2025-03-01T07:05:25.878947+00:00 + # Runnable[2s]: starts at 2025-03-01T07:05:25.879392+00:00 + # Runnable[3s]: starts at 2025-03-01T07:05:25.879804+00:00 + # Runnable[2s]: ends at 2025-03-01T07:05:27.881998+00:00 + # on end callback starts at 2025-03-01T07:05:27.882360+00:00 + # Runnable[3s]: ends at 2025-03-01T07:05:28.881737+00:00 + # on end callback starts at 2025-03-01T07:05:28.882428+00:00 + # on end callback ends at 2025-03-01T07:05:29.883893+00:00 + # on end callback ends at 2025-03-01T07:05:30.884831+00:00 ``` """ return RunnableBinding( @@ -1833,11 +1834,11 @@ class Runnable(ABC, Generic[Input, Output]): """Bind input and output types to a `Runnable`, returning a new `Runnable`. Args: - input_type: The input type to bind to the `Runnable`. Defaults to `None`. 
- output_type: The output type to bind to the `Runnable`. Defaults to `None`. + input_type: The input type to bind to the `Runnable`. + output_type: The output type to bind to the `Runnable`. Returns: - A new Runnable with the types bound. + A new `Runnable` with the types bound. """ return RunnableBinding( bound=self, @@ -1858,17 +1859,16 @@ class Runnable(ABC, Generic[Input, Output]): Args: retry_if_exception_type: A tuple of exception types to retry on. - Defaults to (Exception,). wait_exponential_jitter: Whether to add jitter to the wait - time between retries. Defaults to `True`. + time between retries. stop_after_attempt: The maximum number of attempts to make before - giving up. Defaults to 3. + giving up. exponential_jitter_params: Parameters for `tenacity.wait_exponential_jitter`. Namely: `initial`, `max`, - `exp_base`, and `jitter` (all float values). + `exp_base`, and `jitter` (all `float` values). Returns: - A new Runnable that retries the original Runnable on exceptions. + A new `Runnable` that retries the original `Runnable` on exceptions. Example: ```python @@ -1950,16 +1950,17 @@ class Runnable(ABC, Generic[Input, Output]): fallbacks: A sequence of runnables to try if the original `Runnable` fails. exceptions_to_handle: A tuple of exception types to handle. - Defaults to `(Exception,)`. - exception_key: If string is specified then handled exceptions will be passed - to fallbacks as part of the input under the specified key. + exception_key: If `string` is specified then handled exceptions will be + passed to fallbacks as part of the input under the specified key. + If `None`, exceptions will not be passed to fallbacks. + If used, the base `Runnable` and its fallbacks must accept a - dictionary as input. Defaults to `None`. + dictionary as input. Returns: A new `Runnable` that will try the original `Runnable`, and then each - Fallback in order, upon failures. + Fallback in order, upon failures. Example: ```python @@ -1987,16 +1988,17 @@ class Runnable(ABC, Generic[Input, Output]): fallbacks: A sequence of runnables to try if the original `Runnable` fails. exceptions_to_handle: A tuple of exception types to handle. - exception_key: If string is specified then handled exceptions will be passed - to fallbacks as part of the input under the specified key. + exception_key: If `string` is specified then handled exceptions will be + passed to fallbacks as part of the input under the specified key. + If `None`, exceptions will not be passed to fallbacks. + If used, the base `Runnable` and its fallbacks must accept a dictionary as input. Returns: A new `Runnable` that will try the original `Runnable`, and then each - Fallback in order, upon failures. - + Fallback in order, upon failures. """ # Import locally to prevent circular import from langchain_core.runnables.fallbacks import ( # noqa: PLC0415 @@ -2456,16 +2458,20 @@ class Runnable(ABC, Generic[Input, Output]): `as_tool` will instantiate a `BaseTool` with a name, description, and `args_schema` from a `Runnable`. Where possible, schemas are inferred - from `runnable.get_input_schema`. Alternatively (e.g., if the - `Runnable` takes a dict as input and the specific dict keys are not typed), - the schema can be specified directly with `args_schema`. You can also - pass `arg_types` to just specify the required arguments and their types. + from `runnable.get_input_schema`. 
+ + Alternatively (e.g., if the `Runnable` takes a dict as input and the specific + `dict` keys are not typed), the schema can be specified directly with + `args_schema`. + + You can also pass `arg_types` to just specify the required arguments and their + types. Args: - args_schema: The schema for the tool. Defaults to `None`. - name: The name of the tool. Defaults to `None`. - description: The description of the tool. Defaults to `None`. - arg_types: A dictionary of argument names to types. Defaults to `None`. + args_schema: The schema for the tool. + name: The name of the tool. + description: The description of the tool. + arg_types: A dictionary of argument names to types. Returns: A `BaseTool` instance. @@ -2528,7 +2534,7 @@ class Runnable(ABC, Generic[Input, Output]): as_tool.invoke({"a": 3, "b": [1, 2]}) ``` - String input: + `str` input: ```python from langchain_core.runnables import RunnableLambda @@ -2546,9 +2552,6 @@ class Runnable(ABC, Generic[Input, Output]): as_tool = runnable.as_tool() as_tool.invoke("b") ``` - - !!! version-added "Added in version 0.2.14" - """ # Avoid circular import from langchain_core.tools import convert_runnable_to_tool # noqa: PLC0415 @@ -2654,9 +2657,7 @@ class RunnableSerializable(Serializable, Runnable[Input, Output]): which: The `ConfigurableField` instance that will be used to select the alternative. default_key: The default key to use if no alternative is selected. - Defaults to `'default'`. prefix_keys: Whether to prefix the keys with the `ConfigurableField` id. - Defaults to `False`. **kwargs: A dictionary of keys to `Runnable` instances or callables that return `Runnable` instances. @@ -2669,7 +2670,7 @@ class RunnableSerializable(Serializable, Runnable[Input, Output]): from langchain_openai import ChatOpenAI model = ChatAnthropic( - model_name="claude-3-7-sonnet-20250219" + model_name="claude-sonnet-4-5-20250929" ).configurable_alternatives( ConfigurableField(id="llm"), default_key="anthropic", @@ -2782,6 +2783,9 @@ def _seq_output_schema( return last.get_output_schema(config) +_RUNNABLE_SEQUENCE_MIN_STEPS = 2 + + class RunnableSequence(RunnableSerializable[Input, Output]): """Sequence of `Runnable` objects, where the output of one is the input of the next. @@ -2888,10 +2892,10 @@ class RunnableSequence(RunnableSerializable[Input, Output]): Args: steps: The steps to include in the sequence. - name: The name of the `Runnable`. Defaults to `None`. - first: The first `Runnable` in the sequence. Defaults to `None`. - middle: The middle `Runnable` objects in the sequence. Defaults to `None`. - last: The last Runnable in the sequence. Defaults to `None`. + name: The name of the `Runnable`. + first: The first `Runnable` in the sequence. + middle: The middle `Runnable` objects in the sequence. + last: The last `Runnable` in the sequence. Raises: ValueError: If the sequence has less than 2 steps. 
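
The hunks above document `RunnableSequence.__init__` and introduce the `_RUNNABLE_SEQUENCE_MIN_STEPS` guard. As a minimal sketch of the behaviour being documented (the `RunnableLambda` steps and values below are illustrative only):

```python
from langchain_core.runnables import RunnableLambda, RunnableSequence

add_one = RunnableLambda(lambda x: x + 1)
double = RunnableLambda(lambda x: x * 2)

# Composing two runnables with `|` builds a RunnableSequence under the hood.
chain = add_one | double
assert isinstance(chain, RunnableSequence)
assert chain.invoke(1) == 4  # (1 + 1) * 2

# Fewer than two steps is rejected, per the ValueError documented above.
try:
    RunnableSequence(add_one)
except ValueError as err:
    print(err)  # RunnableSequence must have at least 2 steps, got 1
```
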
@@ -2904,8 +2908,11 @@ class RunnableSequence(RunnableSerializable[Input, Output]): steps_flat.extend(step.steps) else: steps_flat.append(coerce_to_runnable(step)) - if len(steps_flat) < 2: - msg = f"RunnableSequence must have at least 2 steps, got {len(steps_flat)}" + if len(steps_flat) < _RUNNABLE_SEQUENCE_MIN_STEPS: + msg = ( + f"RunnableSequence must have at least {_RUNNABLE_SEQUENCE_MIN_STEPS} " + f"steps, got {len(steps_flat)}" + ) raise ValueError(msg) super().__init__( first=steps_flat[0], @@ -2917,7 +2924,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]): @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -2936,7 +2943,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True model_config = ConfigDict( @@ -2960,7 +2967,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]): """Get the input schema of the `Runnable`. Args: - config: The config to use. Defaults to `None`. + config: The config to use. Returns: The input schema of the `Runnable`. @@ -2975,7 +2982,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]): """Get the output schema of the `Runnable`. Args: - config: The config to use. Defaults to `None`. + config: The config to use. Returns: The output schema of the `Runnable`. @@ -3002,7 +3009,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]): """Get the graph representation of the `Runnable`. Args: - config: The config to use. Defaults to `None`. + config: The config to use. Returns: The graph representation of the `Runnable`. @@ -3532,7 +3539,7 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): Returns a mapping of their outputs. - `RunnableParallel` is one of the two main composition primitives for the LCEL, + `RunnableParallel` is one of the two main composition primitives, alongside `RunnableSequence`. It invokes `Runnable`s concurrently, providing the same input to each. @@ -3629,7 +3636,7 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): """Create a `RunnableParallel`. Args: - steps__: The steps to include. Defaults to `None`. + steps__: The steps to include. **kwargs: Additional steps to include. """ @@ -3642,13 +3649,13 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -3664,8 +3671,8 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): """Get the name of the `Runnable`. Args: - suffix: The suffix to use. Defaults to `None`. - name: The name to use. Defaults to `None`. + suffix: The suffix to use. + name: The name to use. Returns: The name of the `Runnable`. @@ -3689,7 +3696,7 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): """Get the input schema of the `Runnable`. Args: - config: The config to use. Defaults to `None`. + config: The config to use. Returns: The input schema of the `Runnable`. 
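
To ground the `RunnableParallel` docstring changes above ("invokes `Runnable`s concurrently, providing the same input to each"), a minimal sketch; the step names and lambdas are illustrative:

```python
from langchain_core.runnables import RunnableLambda, RunnableParallel

parallel = RunnableParallel(
    doubled=RunnableLambda(lambda x: x * 2),
    squared=RunnableLambda(lambda x: x * x),
)

# Each step receives the same input; the result is a dict keyed by step name.
print(parallel.invoke(3))  # {'doubled': 6, 'squared': 9}
```

A plain `dict` of runnables used inside a chain is coerced to a `RunnableParallel` in the same way.
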
@@ -3700,6 +3707,12 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): == "object" for s in self.steps__.values() ): + for step in self.steps__.values(): + fields = step.get_input_schema(config).model_fields + root_field = fields.get("root") + if root_field is not None and root_field.annotation != Any: + return super().get_input_schema(config) + # This is correct, but pydantic typings/mypy don't think so. return create_model_v2( self.get_name("Input"), @@ -3720,7 +3733,7 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): """Get the output schema of the `Runnable`. Args: - config: The config to use. Defaults to `None`. + config: The config to use. Returns: The output schema of the `Runnable`. @@ -3747,7 +3760,7 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]): """Get the graph representation of the `Runnable`. Args: - config: The config to use. Defaults to `None`. + config: The config to use. Returns: The graph representation of the `Runnable`. @@ -4157,8 +4170,8 @@ class RunnableGenerator(Runnable[Input, Output]): Args: transform: The transform function. - atransform: The async transform function. Defaults to `None`. - name: The name of the `Runnable`. Defaults to `None`. + atransform: The async transform function. + name: The name of the `Runnable`. Raises: TypeError: If the transform is not a generator function. @@ -4435,8 +4448,8 @@ class RunnableLambda(Runnable[Input, Output]): Args: func: Either sync or async callable afunc: An async callable that takes an input and returns an output. - Defaults to `None`. - name: The name of the `Runnable`. Defaults to `None`. + + name: The name of the `Runnable`. Raises: TypeError: If the `func` is not a callable type. @@ -4493,10 +4506,10 @@ class RunnableLambda(Runnable[Input, Output]): @override def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]: - """The pydantic schema for the input to this `Runnable`. + """The Pydantic schema for the input to this `Runnable`. Args: - config: The config to use. Defaults to `None`. + config: The config to use. Returns: The input schema for this `Runnable`. @@ -4509,7 +4522,7 @@ class RunnableLambda(Runnable[Input, Output]): # on itemgetter objects, so we have to parse the repr items = str(func).replace("operator.itemgetter(", "")[:-1].split(", ") if all( - item[0] == "'" and item[-1] == "'" and len(item) > 2 for item in items + item[0] == "'" and item[-1] == "'" and item != "''" for item in items ): fields = {item[1:-1]: (Any, ...) for item in items} # It's a dict, lol @@ -4830,7 +4843,7 @@ class RunnableLambda(Runnable[Input, Output]): Args: input: The input to this `Runnable`. - config: The config to use. Defaults to `None`. + config: The config to use. **kwargs: Additional keyword arguments. Returns: @@ -4861,7 +4874,7 @@ class RunnableLambda(Runnable[Input, Output]): Args: input: The input to this `Runnable`. - config: The config to use. Defaults to `None`. + config: The config to use. **kwargs: Additional keyword arguments. Returns: @@ -5127,7 +5140,7 @@ class RunnableEachBase(RunnableSerializable[list[Input], list[Output]]): None, ), # create model needs access to appropriate type annotations to be - # able to construct the pydantic model. + # able to construct the Pydantic model. # When we create the model, we pass information about the namespace # where the model is being created, so the type annotations can # be resolved correctly as well. 
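
Since the hunks above touch the `RunnableLambda` constructor (`func`/`afunc`) and its `get_input_schema` handling of `itemgetter` objects, here is a small sketch of both behaviours. It assumes a Pydantic v2 `langchain_core`, as in this diff; the function bodies and key names are illustrative:

```python
from operator import itemgetter

from langchain_core.runnables import RunnableLambda

# A RunnableLambda wraps a plain callable (an async variant can be supplied
# via the `afunc` argument documented above).
reverse = RunnableLambda(lambda s: s[::-1])
print(reverse.invoke("hello"))  # 'olleh'

# For itemgetter-based callables, the input schema is inferred from the keys,
# as shown in the get_input_schema hunk above.
pick = RunnableLambda(itemgetter("question"))
print(pick.get_input_schema().model_json_schema())  # schema with a "question" field
print(pick.invoke({"question": "hi"}))  # 'hi'
```
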
@@ -5150,7 +5163,7 @@ class RunnableEachBase(RunnableSerializable[list[Input], list[Output]]): self.get_name("Output"), root=list[schema], # type: ignore[valid-type] # create model needs access to appropriate type annotations to be - # able to construct the pydantic model. + # able to construct the Pydantic model. # When we create the model, we pass information about the namespace # where the model is being created, so the type annotations can # be resolved correctly as well. @@ -5171,13 +5184,13 @@ class RunnableEachBase(RunnableSerializable[list[Input], list[Output]]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -5303,11 +5316,11 @@ class RunnableEach(RunnableEachBase[Input, Output]): Args: on_start: Called before the `Runnable` starts running, with the `Run` - object. Defaults to `None`. + object. on_end: Called after the `Runnable` finishes running, with the `Run` - object. Defaults to `None`. + object. on_error: Called if the `Runnable` throws an error, with the `Run` - object. Defaults to `None`. + object. Returns: A new `Runnable` with the listeners bound. @@ -5336,11 +5349,11 @@ class RunnableEach(RunnableEachBase[Input, Output]): Args: on_start: Called asynchronously before the `Runnable` starts running, - with the `Run` object. Defaults to `None`. + with the `Run` object. on_end: Called asynchronously after the `Runnable` finishes running, - with the `Run` object. Defaults to `None`. + with the `Run` object. on_error: Called asynchronously if the `Runnable` throws an error, - with the `Run` object. Defaults to `None`. + with the `Run` object. Returns: A new `Runnable` with the listeners bound. @@ -5354,7 +5367,7 @@ class RunnableEach(RunnableEachBase[Input, Output]): class RunnableBindingBase(RunnableSerializable[Input, Output]): # type: ignore[no-redef] - """`Runnable` that delegates calls to another `Runnable` with a set of kwargs. + """`Runnable` that delegates calls to another `Runnable` with a set of `**kwargs`. Use only if creating a new `RunnableBinding` subclass with different `__init__` args. @@ -5387,13 +5400,13 @@ class RunnableBindingBase(RunnableSerializable[Input, Output]): # type: ignore[ custom_input_type: Any | None = None """Override the input type of the underlying `Runnable` with a custom type. - The type can be a pydantic model, or a type annotation (e.g., `list[str]`). + The type can be a Pydantic model, or a type annotation (e.g., `list[str]`). """ # Union[Type[Output], BaseModel] + things like list[str] custom_output_type: Any | None = None """Override the output type of the underlying `Runnable` with a custom type. - The type can be a pydantic model, or a type annotation (e.g., `list[str]`). + The type can be a Pydantic model, or a type annotation (e.g., `list[str]`). """ model_config = ConfigDict( @@ -5420,16 +5433,16 @@ class RunnableBindingBase(RunnableSerializable[Input, Output]): # type: ignore[ kwargs: optional kwargs to pass to the underlying `Runnable`, when running the underlying `Runnable` (e.g., via `invoke`, `batch`, `transform`, or `stream` or async variants) - Defaults to `None`. + config: optional config to bind to the underlying `Runnable`. - Defaults to `None`. 
+ config_factories: optional list of config factories to apply to the config before binding to the underlying `Runnable`. - Defaults to `None`. + custom_input_type: Specify to override the input type of the underlying - `Runnable` with a custom type. Defaults to `None`. + `Runnable` with a custom type. custom_output_type: Specify to override the output type of the underlying - `Runnable` with a custom type. Defaults to `None`. + `Runnable` with a custom type. **other_kwargs: Unpacked into the base class. """ super().__init__( @@ -5494,13 +5507,13 @@ class RunnableBindingBase(RunnableSerializable[Input, Output]): # type: ignore[ @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -5782,9 +5795,9 @@ class RunnableBinding(RunnableBindingBase[Input, Output]): # type: ignore[no-re `bind`: Bind kwargs to pass to the underlying `Runnable` when running it. ```python - # Create a Runnable binding that invokes the ChatModel with the + # Create a Runnable binding that invokes the chat model with the # additional kwarg `stop=['-']` when running it. - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI model = ChatOpenAI() model.invoke('Say "Parrot-MAGIC"', stop=["-"]) # Should return `Parrot` @@ -5866,11 +5879,11 @@ class RunnableBinding(RunnableBindingBase[Input, Output]): # type: ignore[no-re Args: on_start: Called before the `Runnable` starts running, with the `Run` - object. Defaults to `None`. + object. on_end: Called after the `Runnable` finishes running, with the `Run` - object. Defaults to `None`. + object. on_error: Called if the `Runnable` throws an error, with the `Run` - object. Defaults to `None`. + object. Returns: A new `Runnable` with the listeners bound. @@ -6077,10 +6090,10 @@ def chain( @chain def my_func(fields): prompt = PromptTemplate("Hello, {name}!") - llm = OpenAI() + model = OpenAI() formatted = prompt.invoke(**fields) - for chunk in llm.stream(formatted): + for chunk in model.stream(formatted): yield chunk ``` """ diff --git a/libs/core/langchain_core/runnables/branch.py b/libs/core/langchain_core/runnables/branch.py index deba85521c0..e9396ce5c25 100644 --- a/libs/core/langchain_core/runnables/branch.py +++ b/libs/core/langchain_core/runnables/branch.py @@ -36,17 +36,19 @@ from langchain_core.runnables.utils import ( get_unique_config_specs, ) +_MIN_BRANCHES = 2 + class RunnableBranch(RunnableSerializable[Input, Output]): - """Runnable that selects which branch to run based on a condition. + """`Runnable` that selects which branch to run based on a condition. - The Runnable is initialized with a list of (condition, Runnable) pairs and + The `Runnable` is initialized with a list of `(condition, Runnable)` pairs and a default branch. When operating on an input, the first condition that evaluates to True is - selected, and the corresponding Runnable is run on the input. + selected, and the corresponding `Runnable` is run on the input. - If no condition evaluates to True, the default branch is run on the input. + If no condition evaluates to `True`, the default branch is run on the input. 
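
The `RunnableBranch` description above says the first condition that evaluates to `True` wins, with the default branch as a fallback. A minimal sketch of that ordering rule (separate from the docstring's own `Examples` section; the conditions and outputs here are illustrative):

```python
from langchain_core.runnables import RunnableBranch, RunnableLambda

branch = RunnableBranch(
    (lambda x: isinstance(x, str), RunnableLambda(lambda x: x.upper())),
    (lambda x: isinstance(x, int), RunnableLambda(lambda x: x + 1)),
    RunnableLambda(lambda x: "default"),  # runs when no condition matches
)

print(branch.invoke("hello"))  # 'HELLO'  (first condition matches)
print(branch.invoke(41))       # 42       (second condition matches)
print(branch.invoke(None))     # 'default' (no condition matches)
```
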
Examples: ```python @@ -65,9 +67,9 @@ class RunnableBranch(RunnableSerializable[Input, Output]): """ branches: Sequence[tuple[Runnable[Input, bool], Runnable[Input, Output]]] - """A list of (condition, Runnable) pairs.""" + """A list of `(condition, Runnable)` pairs.""" default: Runnable[Input, Output] - """A Runnable to run if no condition is met.""" + """A `Runnable` to run if no condition is met.""" def __init__( self, @@ -79,19 +81,19 @@ class RunnableBranch(RunnableSerializable[Input, Output]): ] | RunnableLike, ) -> None: - """A Runnable that runs one of two branches based on a condition. + """A `Runnable` that runs one of two branches based on a condition. Args: - *branches: A list of (condition, Runnable) pairs. - Defaults a Runnable to run if no condition is met. + *branches: A list of `(condition, Runnable)` pairs. + Defaults a `Runnable` to run if no condition is met. Raises: - ValueError: If the number of branches is less than 2. - TypeError: If the default branch is not Runnable, Callable or Mapping. - TypeError: If a branch is not a tuple or list. - ValueError: If a branch is not of length 2. + ValueError: If the number of branches is less than `2`. + TypeError: If the default branch is not `Runnable`, `Callable` or `Mapping`. + TypeError: If a branch is not a `tuple` or `list`. + ValueError: If a branch is not of length `2`. """ - if len(branches) < 2: + if len(branches) < _MIN_BRANCHES: msg = "RunnableBranch requires at least two branches" raise ValueError(msg) @@ -118,7 +120,7 @@ class RunnableBranch(RunnableSerializable[Input, Output]): ) raise TypeError(msg) - if len(branch) != 2: + if len(branch) != _MIN_BRANCHES: msg = ( f"RunnableBranch branches must be " f"tuples or lists of length 2, not {len(branch)}" @@ -140,13 +142,13 @@ class RunnableBranch(RunnableSerializable[Input, Output]): @classmethod def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -187,12 +189,12 @@ class RunnableBranch(RunnableSerializable[Input, Output]): def invoke( self, input: Input, config: RunnableConfig | None = None, **kwargs: Any ) -> Output: - """First evaluates the condition, then delegate to true or false branch. + """First evaluates the condition, then delegate to `True` or `False` branch. Args: - input: The input to the Runnable. - config: The configuration for the Runnable. Defaults to `None`. - **kwargs: Additional keyword arguments to pass to the Runnable. + input: The input to the `Runnable`. + config: The configuration for the `Runnable`. + **kwargs: Additional keyword arguments to pass to the `Runnable`. Returns: The output of the branch that was run. @@ -297,12 +299,12 @@ class RunnableBranch(RunnableSerializable[Input, Output]): config: RunnableConfig | None = None, **kwargs: Any | None, ) -> Iterator[Output]: - """First evaluates the condition, then delegate to true or false branch. + """First evaluates the condition, then delegate to `True` or `False` branch. Args: - input: The input to the Runnable. - config: The configuration for the Runnable. Defaults to `None`. - **kwargs: Additional keyword arguments to pass to the Runnable. + input: The input to the `Runnable`. + config: The configuration for the Runna`ble. 
+ **kwargs: Additional keyword arguments to pass to the `Runnable`. Yields: The output of the branch that was run. @@ -381,12 +383,12 @@ class RunnableBranch(RunnableSerializable[Input, Output]): config: RunnableConfig | None = None, **kwargs: Any | None, ) -> AsyncIterator[Output]: - """First evaluates the condition, then delegate to true or false branch. + """First evaluates the condition, then delegate to `True` or `False` branch. Args: - input: The input to the Runnable. - config: The configuration for the Runnable. Defaults to `None`. - **kwargs: Additional keyword arguments to pass to the Runnable. + input: The input to the `Runnable`. + config: The configuration for the `Runnable`. + **kwargs: Additional keyword arguments to pass to the `Runnable`. Yields: The output of the branch that was run. diff --git a/libs/core/langchain_core/runnables/config.py b/libs/core/langchain_core/runnables/config.py index 33f062607ed..f67bf095927 100644 --- a/libs/core/langchain_core/runnables/config.py +++ b/libs/core/langchain_core/runnables/config.py @@ -75,26 +75,26 @@ class RunnableConfig(TypedDict, total=False): max_concurrency: int | None """ Maximum number of parallel calls to make. If not provided, defaults to - ThreadPoolExecutor's default. + `ThreadPoolExecutor`'s default. """ recursion_limit: int """ - Maximum number of times a call can recurse. If not provided, defaults to 25. + Maximum number of times a call can recurse. If not provided, defaults to `25`. """ configurable: dict[str, Any] """ - Runtime values for attributes previously made configurable on this Runnable, - or sub-Runnables, through .configurable_fields() or .configurable_alternatives(). - Check .output_schema() for a description of the attributes that have been made + Runtime values for attributes previously made configurable on this `Runnable`, + or sub-Runnables, through `configurable_fields` or `configurable_alternatives`. + Check `output_schema` for a description of the attributes that have been made configurable. """ run_id: uuid.UUID | None """ Unique identifier for the tracer run for this call. If not provided, a new UUID - will be generated. + will be generated. """ @@ -193,7 +193,7 @@ def ensure_config(config: RunnableConfig | None = None) -> RunnableConfig: """Ensure that a config is a dict with all keys present. Args: - config: The config to ensure. Defaults to `None`. + config: The config to ensure. Returns: The ensured config. @@ -412,7 +412,7 @@ def call_func_with_variable_args( func: The function to call. input: The input to the function. config: The config to pass to the function. - run_manager: The run manager to pass to the function. Defaults to `None`. + run_manager: The run manager to pass to the function. **kwargs: The keyword arguments to pass to the function. Returns: @@ -446,7 +446,7 @@ def acall_func_with_variable_args( func: The function to call. input: The input to the function. config: The config to pass to the function. - run_manager: The run manager to pass to the function. Defaults to `None`. + run_manager: The run manager to pass to the function. **kwargs: The keyword arguments to pass to the function. Returns: @@ -527,16 +527,15 @@ class ContextThreadPoolExecutor(ThreadPoolExecutor): self, fn: Callable[..., T], *iterables: Iterable[Any], - timeout: float | None = None, - chunksize: int = 1, + **kwargs: Any, ) -> Iterator[T]: """Map a function to multiple iterables. Args: fn: The function to map. *iterables: The iterables to map over. - timeout: The timeout for the map. Defaults to `None`. 
- chunksize: The chunksize for the map. Defaults to 1. + timeout: The timeout for the map. + chunksize: The chunksize for the map. Returns: The iterator for the mapped function. @@ -549,8 +548,7 @@ class ContextThreadPoolExecutor(ThreadPoolExecutor): return super().map( _wrapped_fn, *iterables, - timeout=timeout, - chunksize=chunksize, + **kwargs, ) diff --git a/libs/core/langchain_core/runnables/configurable.py b/libs/core/langchain_core/runnables/configurable.py index da4e75f72fb..5552fa620ea 100644 --- a/libs/core/langchain_core/runnables/configurable.py +++ b/libs/core/langchain_core/runnables/configurable.py @@ -1,4 +1,4 @@ -"""Runnables that can be dynamically configured.""" +"""`Runnable` objects that can be dynamically configured.""" from __future__ import annotations @@ -47,14 +47,14 @@ if TYPE_CHECKING: class DynamicRunnable(RunnableSerializable[Input, Output]): - """Serializable Runnable that can be dynamically configured. + """Serializable `Runnable` that can be dynamically configured. - A DynamicRunnable should be initiated using the `configurable_fields` or - `configurable_alternatives` method of a Runnable. + A `DynamicRunnable` should be initiated using the `configurable_fields` or + `configurable_alternatives` method of a `Runnable`. """ default: RunnableSerializable[Input, Output] - """The default Runnable to use.""" + """The default `Runnable` to use.""" config: RunnableConfig | None = None """The configuration to use.""" @@ -66,13 +66,13 @@ class DynamicRunnable(RunnableSerializable[Input, Output]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -120,13 +120,13 @@ class DynamicRunnable(RunnableSerializable[Input, Output]): def prepare( self, config: RunnableConfig | None = None ) -> tuple[Runnable[Input, Output], RunnableConfig]: - """Prepare the Runnable for invocation. + """Prepare the `Runnable` for invocation. Args: - config: The configuration to use. Defaults to `None`. + config: The configuration to use. Returns: - The prepared Runnable and configuration. + The prepared `Runnable` and configuration. """ runnable: Runnable[Input, Output] = self while isinstance(runnable, DynamicRunnable): @@ -316,12 +316,12 @@ class DynamicRunnable(RunnableSerializable[Input, Output]): class RunnableConfigurableFields(DynamicRunnable[Input, Output]): - """Runnable that can be dynamically configured. + """`Runnable` that can be dynamically configured. - A RunnableConfigurableFields should be initiated using the - `configurable_fields` method of a Runnable. + A `RunnableConfigurableFields` should be initiated using the + `configurable_fields` method of a `Runnable`. 
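
Stepping back to the `config.py` hunks above (the `RunnableConfig` fields such as `max_concurrency`, `recursion_limit`, `configurable`, and `run_id`, plus `ensure_config`), a small sketch of how a config is normalised and passed to a `Runnable`. The tag and metadata values are illustrative, and the `25` is the default stated in the docstring above:

```python
from langchain_core.runnables import RunnableLambda
from langchain_core.runnables.config import ensure_config

# ensure_config fills in defaults for any missing RunnableConfig keys.
config = ensure_config({"tags": ["demo"], "max_concurrency": 4})
print(config["recursion_limit"])  # 25, per the default documented above

# The same keys can be passed directly to any Runnable method.
echo = RunnableLambda(lambda x: x)
echo.invoke("hello", config={"tags": ["demo"], "metadata": {"source": "docs"}})
```
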
- Here is an example of using a RunnableConfigurableFields with LLMs: + Here is an example of using a `RunnableConfigurableFields` with LLMs: ```python from langchain_core.prompts import PromptTemplate @@ -348,7 +348,7 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]): chain.invoke({"x": 0}, config={"configurable": {"temperature": 0.9}}) ``` - Here is an example of using a RunnableConfigurableFields with HubRunnables: + Here is an example of using a `RunnableConfigurableFields` with `HubRunnables`: ```python from langchain_core.prompts import PromptTemplate @@ -380,7 +380,7 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]): @property def config_specs(self) -> list[ConfigurableFieldSpec]: - """Get the configuration specs for the RunnableConfigurableFields. + """Get the configuration specs for the `RunnableConfigurableFields`. Returns: The configuration specs. @@ -473,13 +473,13 @@ _enums_for_spec_lock = threading.Lock() class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]): - """Runnable that can be dynamically configured. + """`Runnable` that can be dynamically configured. - A RunnableConfigurableAlternatives should be initiated using the - `configurable_alternatives` method of a Runnable or can be + A `RunnableConfigurableAlternatives` should be initiated using the + `configurable_alternatives` method of a `Runnable` or can be initiated directly as well. - Here is an example of using a RunnableConfigurableAlternatives that uses + Here is an example of using a `RunnableConfigurableAlternatives` that uses alternative prompts to illustrate its functionality: ```python @@ -506,7 +506,7 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]): chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"}) ``` - Equivalently, you can initialize RunnableConfigurableAlternatives directly + Equivalently, you can initialize `RunnableConfigurableAlternatives` directly and use in LCEL in the same way: ```python @@ -531,7 +531,7 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]): """ which: ConfigurableField - """The ConfigurableField to use to choose between alternatives.""" + """The `ConfigurableField` to use to choose between alternatives.""" alternatives: dict[ str, @@ -540,12 +540,13 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]): """The alternatives to choose from.""" default_key: str = "default" - """The enum value to use for the default option. Defaults to `'default'`.""" + """The enum value to use for the default option.""" prefix_keys: bool """Whether to prefix configurable fields of each alternative with a namespace - of the form ==, eg. a key named "temperature" used by - the alternative named "gpt3" becomes "model==gpt3/temperature".""" + of the form ==, e.g. a key named "temperature" used by + the alternative named "gpt3" becomes "model==gpt3/temperature". + """ @property @override @@ -638,24 +639,24 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]): def _strremoveprefix(s: str, prefix: str) -> str: - """str.removeprefix() is only available in Python 3.9+.""" + """`str.removeprefix()` is only available in Python 3.9+.""" return s.replace(prefix, "", 1) if s.startswith(prefix) else s def prefix_config_spec( spec: ConfigurableFieldSpec, prefix: str ) -> ConfigurableFieldSpec: - """Prefix the id of a ConfigurableFieldSpec. + """Prefix the id of a `ConfigurableFieldSpec`. 
- This is useful when a RunnableConfigurableAlternatives is used as a - ConfigurableField of another RunnableConfigurableAlternatives. + This is useful when a `RunnableConfigurableAlternatives` is used as a + `ConfigurableField` of another `RunnableConfigurableAlternatives`. Args: - spec: The ConfigurableFieldSpec to prefix. + spec: The `ConfigurableFieldSpec` to prefix. prefix: The prefix to add. Returns: - The prefixed ConfigurableFieldSpec. + The prefixed `ConfigurableFieldSpec`. """ return ( ConfigurableFieldSpec( @@ -677,15 +678,15 @@ def make_options_spec( ) -> ConfigurableFieldSpec: """Make options spec. - Make a ConfigurableFieldSpec for a ConfigurableFieldSingleOption or - ConfigurableFieldMultiOption. + Make a `ConfigurableFieldSpec` for a `ConfigurableFieldSingleOption` or + `ConfigurableFieldMultiOption`. Args: - spec: The ConfigurableFieldSingleOption or ConfigurableFieldMultiOption. + spec: The `ConfigurableFieldSingleOption` or `ConfigurableFieldMultiOption`. description: The description to use if the spec does not have one. Returns: - The ConfigurableFieldSpec. + The `ConfigurableFieldSpec`. """ with _enums_for_spec_lock: if enum := _enums_for_spec.get(spec): diff --git a/libs/core/langchain_core/runnables/fallbacks.py b/libs/core/langchain_core/runnables/fallbacks.py index 61f1753d8dd..0584d64e60c 100644 --- a/libs/core/langchain_core/runnables/fallbacks.py +++ b/libs/core/langchain_core/runnables/fallbacks.py @@ -35,20 +35,20 @@ if TYPE_CHECKING: class RunnableWithFallbacks(RunnableSerializable[Input, Output]): - """Runnable that can fallback to other Runnables if it fails. + """`Runnable` that can fallback to other `Runnable`s if it fails. External APIs (e.g., APIs for a language model) may at times experience degraded performance or even downtime. - In these cases, it can be useful to have a fallback Runnable that can be - used in place of the original Runnable (e.g., fallback to another LLM provider). + In these cases, it can be useful to have a fallback `Runnable` that can be + used in place of the original `Runnable` (e.g., fallback to another LLM provider). - Fallbacks can be defined at the level of a single Runnable, or at the level - of a chain of Runnables. Fallbacks are tried in order until one succeeds or + Fallbacks can be defined at the level of a single `Runnable`, or at the level + of a chain of `Runnable`s. Fallbacks are tried in order until one succeeds or all fail. While you can instantiate a `RunnableWithFallbacks` directly, it is usually - more convenient to use the `with_fallbacks` method on a Runnable. + more convenient to use the `with_fallbacks` method on a `Runnable`. Example: ```python @@ -87,7 +87,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): """ runnable: Runnable[Input, Output] - """The Runnable to run first.""" + """The `Runnable` to run first.""" fallbacks: Sequence[Runnable[Input, Output]] """A sequence of fallbacks to try.""" exceptions_to_handle: tuple[type[BaseException], ...] = (Exception,) @@ -96,10 +96,13 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): Any exception that is not a subclass of these exceptions will be raised immediately. """ exception_key: str | None = None - """If string is specified then handled exceptions will be passed to fallbacks as - part of the input under the specified key. If `None`, exceptions - will not be passed to fallbacks. 
If used, the base Runnable and its fallbacks - must accept a dictionary as input.""" + """If `string` is specified then handled exceptions will be passed to fallbacks as + part of the input under the specified key. + + If `None`, exceptions will not be passed to fallbacks. + + If used, the base `Runnable` and its fallbacks must accept a dictionary as input. + """ model_config = ConfigDict( arbitrary_types_allowed=True, @@ -137,13 +140,13 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -152,10 +155,10 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): @property def runnables(self) -> Iterator[Runnable[Input, Output]]: - """Iterator over the Runnable and its fallbacks. + """Iterator over the `Runnable` and its fallbacks. Yields: - The Runnable then its fallbacks. + The `Runnable` then its fallbacks. """ yield self.runnable yield from self.fallbacks @@ -589,14 +592,14 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): await run_manager.on_chain_end(output) def __getattr__(self, name: str) -> Any: - """Get an attribute from the wrapped Runnable and its fallbacks. + """Get an attribute from the wrapped `Runnable` and its fallbacks. Returns: - If the attribute is anything other than a method that outputs a Runnable, - returns getattr(self.runnable, name). If the attribute is a method that - does return a new Runnable (e.g. llm.bind_tools([...]) outputs a new - RunnableBinding) then self.runnable and each of the runnables in - self.fallbacks is replaced with getattr(x, name). + If the attribute is anything other than a method that outputs a `Runnable`, + returns `getattr(self.runnable, name)`. If the attribute is a method that + does return a new `Runnable` (e.g. `model.bind_tools([...])` outputs a new + `RunnableBinding`) then `self.runnable` and each of the runnables in + `self.fallbacks` is replaced with `getattr(x, name)`. Example: ```python @@ -604,21 +607,20 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): from langchain_anthropic import ChatAnthropic gpt_4o = ChatOpenAI(model="gpt-4o") - claude_3_sonnet = ChatAnthropic(model="claude-3-7-sonnet-20250219") - llm = gpt_4o.with_fallbacks([claude_3_sonnet]) + claude_3_sonnet = ChatAnthropic(model="claude-sonnet-4-5-20250929") + model = gpt_4o.with_fallbacks([claude_3_sonnet]) - llm.model_name + model.model_name # -> "gpt-4o" # .bind_tools() is called on both ChatOpenAI and ChatAnthropic # Equivalent to: # gpt_4o.bind_tools([...]).with_fallbacks([claude_3_sonnet.bind_tools([...])]) - llm.bind_tools([...]) + model.bind_tools([...]) # -> RunnableWithFallbacks( runnable=RunnableBinding(bound=ChatOpenAI(...), kwargs={"tools": [...]}), fallbacks=[RunnableBinding(bound=ChatAnthropic(...), kwargs={"tools": [...]})], ) - ``` """ # noqa: E501 attr = getattr(self.runnable, name) diff --git a/libs/core/langchain_core/runnables/graph.py b/libs/core/langchain_core/runnables/graph.py index 589b797fd2a..76f1cc1f5b0 100644 --- a/libs/core/langchain_core/runnables/graph.py +++ b/libs/core/langchain_core/runnables/graph.py @@ -52,7 +52,7 @@ def is_uuid(value: str) -> bool: value: The string to check. 
Returns: - True if the string is a valid UUID, False otherwise. + `True` if the string is a valid UUID, `False` otherwise. """ try: UUID(value) @@ -69,16 +69,16 @@ class Edge(NamedTuple): target: str """The target node id.""" data: Stringifiable | None = None - """Optional data associated with the edge. Defaults to `None`.""" + """Optional data associated with the edge. """ conditional: bool = False - """Whether the edge is conditional. Defaults to `False`.""" + """Whether the edge is conditional.""" def copy(self, *, source: str | None = None, target: str | None = None) -> Edge: """Return a copy of the edge with optional new source and target nodes. Args: - source: The new source node id. Defaults to `None`. - target: The new target node id. Defaults to `None`. + source: The new source node id. + target: The new target node id. Returns: A copy of the edge with the new source and target nodes. @@ -101,7 +101,7 @@ class Node(NamedTuple): data: type[BaseModel] | RunnableType | None """The data of the node.""" metadata: dict[str, Any] | None - """Optional metadata for the node. Defaults to `None`.""" + """Optional metadata for the node. """ def copy( self, @@ -112,8 +112,8 @@ class Node(NamedTuple): """Return a copy of the node with optional new id and name. Args: - id: The new node id. Defaults to `None`. - name: The new node name. Defaults to `None`. + id: The new node id. + name: The new node name. Returns: A copy of the node with the new id and name. @@ -132,7 +132,7 @@ class Branch(NamedTuple): condition: Callable[..., str] """A callable that returns a string representation of the condition.""" ends: dict[str, str] | None - """Optional dictionary of end node ids for the branches. Defaults to `None`.""" + """Optional dictionary of end node IDs for the branches. """ class CurveStyle(Enum): @@ -157,9 +157,9 @@ class NodeStyles: """Schema for Hexadecimal color codes for different node types. Args: - default: The default color code. Defaults to "fill:#f2f0ff,line-height:1.2". - first: The color code for the first node. Defaults to "fill-opacity:0". - last: The color code for the last node. Defaults to "fill:#bfb6fc". + default: The default color code. + first: The color code for the first node. + last: The color code for the last node. """ default: str = "fill:#f2f0ff,line-height:1.2" @@ -201,9 +201,9 @@ def node_data_json( """Convert the data of a node to a JSON-serializable format. Args: - node: The node to convert. - with_schemas: Whether to include the schema of the data if - it is a Pydantic model. Defaults to `False`. + node: The `Node` to convert. + with_schemas: Whether to include the schema of the data if it is a Pydantic + model. Returns: A dictionary with the type of the data and the data itself. @@ -267,7 +267,7 @@ class Graph: Args: with_schemas: Whether to include the schemas of the nodes if they are - Pydantic models. Defaults to `False`. + Pydantic models. Returns: A dictionary with the nodes and edges of the graph. @@ -321,8 +321,8 @@ class Graph: Args: data: The data of the node. - id: The id of the node. Defaults to `None`. - metadata: Optional metadata for the node. Defaults to `None`. + id: The id of the node. + metadata: Optional metadata for the node. Returns: The node that was added to the graph. @@ -361,8 +361,8 @@ class Graph: Args: source: The source node of the edge. target: The target node of the edge. - data: Optional data associated with the edge. Defaults to `None`. - conditional: Whether the edge is conditional. Defaults to `False`. 
+ data: Optional data associated with the edge. + conditional: Whether the edge is conditional. Returns: The edge that was added to the graph. @@ -391,7 +391,7 @@ class Graph: Args: graph: The graph to add. - prefix: The prefix to add to the node ids. Defaults to "". + prefix: The prefix to add to the node ids. Returns: A tuple of the first and last nodes of the subgraph. @@ -458,7 +458,7 @@ class Graph: def first_node(self) -> Node | None: """Find the single node that is not a target of any edge. - If there is no such node, or there are multiple, return None. + If there is no such node, or there are multiple, return `None`. When drawing the graph, this node would be the origin. Returns: @@ -470,7 +470,7 @@ class Graph: def last_node(self) -> Node | None: """Find the single node that is not a source of any edge. - If there is no such node, or there are multiple, return None. + If there is no such node, or there are multiple, return `None`. When drawing the graph, this node would be the destination. Returns: @@ -549,8 +549,8 @@ class Graph: Args: output_file_path: The path to save the image to. If `None`, the image - is not saved. Defaults to `None`. - fontname: The name of the font to use. Defaults to `None`. + is not saved. + fontname: The name of the font to use. labels: Optional labels for nodes and edges in the graph. Defaults to `None`. @@ -585,14 +585,13 @@ class Graph: """Draw the graph as a Mermaid syntax string. Args: - with_styles: Whether to include styles in the syntax. Defaults to `True`. - curve_style: The style of the edges. Defaults to CurveStyle.LINEAR. - node_colors: The colors of the nodes. Defaults to NodeStyles(). + with_styles: Whether to include styles in the syntax. + curve_style: The style of the edges. + node_colors: The colors of the nodes. wrap_label_n_words: The number of words to wrap the node labels at. - Defaults to 9. frontmatter_config: Mermaid frontmatter config. Can be used to customize theme and styles. Will be converted to YAML and - added to the beginning of the mermaid graph. Defaults to `None`. + added to the beginning of the mermaid graph. See more here: https://mermaid.js.org/config/configuration.html. @@ -647,23 +646,19 @@ class Graph: """Draw the graph as a PNG image using Mermaid. Args: - curve_style: The style of the edges. Defaults to CurveStyle.LINEAR. - node_colors: The colors of the nodes. Defaults to NodeStyles(). + curve_style: The style of the edges. + node_colors: The colors of the nodes. wrap_label_n_words: The number of words to wrap the node labels at. - Defaults to 9. output_file_path: The path to save the image to. If `None`, the image - is not saved. Defaults to `None`. + is not saved. draw_method: The method to use to draw the graph. - Defaults to MermaidDrawMethod.API. - background_color: The color of the background. Defaults to "white". - padding: The padding around the graph. Defaults to 10. - max_retries: The maximum number of retries (MermaidDrawMethod.API). - Defaults to 1. - retry_delay: The delay between retries (MermaidDrawMethod.API). - Defaults to 1.0. + background_color: The color of the background. + padding: The padding around the graph. + max_retries: The maximum number of retries (`MermaidDrawMethod.API`). + retry_delay: The delay between retries (`MermaidDrawMethod.API`). frontmatter_config: Mermaid frontmatter config. Can be used to customize theme and styles. Will be converted to YAML and - added to the beginning of the mermaid graph. Defaults to `None`. + added to the beginning of the mermaid graph. 
See more here: https://mermaid.js.org/config/configuration.html. @@ -679,7 +674,7 @@ class Graph: } ``` base_url: The base URL of the Mermaid server for rendering via API. - Defaults to `None`. + Returns: The PNG image as bytes. @@ -711,8 +706,10 @@ class Graph: def _first_node(graph: Graph, exclude: Sequence[str] = ()) -> Node | None: """Find the single node that is not a target of any edge. - Exclude nodes/sources with ids in the exclude list. - If there is no such node, or there are multiple, return None. + Exclude nodes/sources with IDs in the exclude list. + + If there is no such node, or there are multiple, return `None`. + When drawing the graph, this node would be the origin. """ targets = {edge.target for edge in graph.edges if edge.source not in exclude} @@ -727,8 +724,10 @@ def _first_node(graph: Graph, exclude: Sequence[str] = ()) -> Node | None: def _last_node(graph: Graph, exclude: Sequence[str] = ()) -> Node | None: """Find the single node that is not a source of any edge. - Exclude nodes/targets with ids in the exclude list. - If there is no such node, or there are multiple, return None. + Exclude nodes/targets with IDs in the exclude list. + + If there is no such node, or there are multiple, return `None`. + When drawing the graph, this node would be the destination. """ sources = {edge.source for edge in graph.edges if edge.target not in exclude} diff --git a/libs/core/langchain_core/runnables/graph_mermaid.py b/libs/core/langchain_core/runnables/graph_mermaid.py index d6c6d2c7d4c..4f0e494b7c7 100644 --- a/libs/core/langchain_core/runnables/graph_mermaid.py +++ b/libs/core/langchain_core/runnables/graph_mermaid.py @@ -58,15 +58,15 @@ def draw_mermaid( Args: nodes: List of node ids. edges: List of edges, object with a source, target and data. - first_node: Id of the first node. Defaults to `None`. - last_node: Id of the last node. Defaults to `None`. - with_styles: Whether to include styles in the graph. Defaults to `True`. - curve_style: Curve style for the edges. Defaults to CurveStyle.LINEAR. - node_styles: Node colors for different types. Defaults to NodeStyles(). - wrap_label_n_words: Words to wrap the edge labels. Defaults to 9. + first_node: Id of the first node. + last_node: Id of the last node. + with_styles: Whether to include styles in the graph. + curve_style: Curve style for the edges. + node_styles: Node colors for different types. + wrap_label_n_words: Words to wrap the edge labels. frontmatter_config: Mermaid frontmatter config. Can be used to customize theme and styles. Will be converted to YAML and - added to the beginning of the mermaid graph. Defaults to `None`. + added to the beginning of the mermaid graph. See more here: https://mermaid.js.org/config/configuration.html. @@ -286,13 +286,13 @@ def draw_mermaid_png( Args: mermaid_syntax: Mermaid graph syntax. - output_file_path: Path to save the PNG image. Defaults to `None`. - draw_method: Method to draw the graph. Defaults to MermaidDrawMethod.API. - background_color: Background color of the image. Defaults to "white". - padding: Padding around the image. Defaults to 10. - max_retries: Maximum number of retries (MermaidDrawMethod.API). Defaults to 1. - retry_delay: Delay between retries (MermaidDrawMethod.API). Defaults to 1.0. - base_url: Base URL for the Mermaid.ink API. Defaults to `None`. + output_file_path: Path to save the PNG image. + draw_method: Method to draw the graph. + background_color: Background color of the image. + padding: Padding around the image. 
+ max_retries: Maximum number of retries (MermaidDrawMethod.API). + retry_delay: Delay between retries (MermaidDrawMethod.API). + base_url: Base URL for the Mermaid.ink API. Returns: PNG image bytes. @@ -454,7 +454,10 @@ def _render_mermaid_using_api( return img_bytes # If we get a server error (5xx), retry - if 500 <= response.status_code < 600 and attempt < max_retries: + if ( + requests.codes.internal_server_error <= response.status_code + and attempt < max_retries + ): # Exponential backoff with jitter sleep_time = retry_delay * (2**attempt) * (0.5 + 0.5 * random.random()) # noqa: S311 not used for crypto time.sleep(sleep_time) diff --git a/libs/core/langchain_core/runnables/graph_png.py b/libs/core/langchain_core/runnables/graph_png.py index a01a1c475f3..75cc2a796aa 100644 --- a/libs/core/langchain_core/runnables/graph_png.py +++ b/libs/core/langchain_core/runnables/graph_png.py @@ -45,7 +45,7 @@ class PngDrawer: } } The keys are the original labels, and the values are the new labels. - Defaults to `None`. + """ self.fontname = fontname or "arial" self.labels = labels or LabelsDict(nodes={}, edges={}) @@ -104,8 +104,8 @@ class PngDrawer: viz: The graphviz object. source: The source node. target: The target node. - label: The label for the edge. Defaults to `None`. - conditional: Whether the edge is conditional. Defaults to `False`. + label: The label for the edge. + conditional: Whether the edge is conditional. """ viz.add_edge( source, diff --git a/libs/core/langchain_core/runnables/history.py b/libs/core/langchain_core/runnables/history.py index ce77aac9450..91628a8b1d9 100644 --- a/libs/core/langchain_core/runnables/history.py +++ b/libs/core/langchain_core/runnables/history.py @@ -36,23 +36,23 @@ GetSessionHistoryCallable = Callable[..., BaseChatMessageHistory] class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] - """Runnable that manages chat message history for another Runnable. + """`Runnable` that manages chat message history for another `Runnable`. A chat message history is a sequence of messages that represent a conversation. - RunnableWithMessageHistory wraps another Runnable and manages the chat message + `RunnableWithMessageHistory` wraps another `Runnable` and manages the chat message history for it; it is responsible for reading and updating the chat message history. - The formats supported for the inputs and outputs of the wrapped Runnable + The formats supported for the inputs and outputs of the wrapped `Runnable` are described below. - RunnableWithMessageHistory must always be called with a config that contains + `RunnableWithMessageHistory` must always be called with a config that contains the appropriate parameters for the chat message history factory. - By default, the Runnable is expected to take a single configuration parameter + By default, the `Runnable` is expected to take a single configuration parameter called `session_id` which is a string. This parameter is used to create a new - or look up an existing chat message history that matches the given session_id. + or look up an existing chat message history that matches the given `session_id`. 
In this case, the invocation would look like this: @@ -117,12 +117,12 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] ``` - Example where the wrapped Runnable takes a dictionary input: + Example where the wrapped `Runnable` takes a dictionary input: ```python from typing import Optional - from langchain_community.chat_models import ChatAnthropic + from langchain_anthropic import ChatAnthropic from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain_core.runnables.history import RunnableWithMessageHistory @@ -166,7 +166,7 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] print(store) # noqa: T201 ``` - Example where the session factory takes two keys, user_id and conversation id): + Example where the session factory takes two keys (`user_id` and `conversation_id`): ```python store = {} @@ -223,21 +223,28 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] """ get_session_history: GetSessionHistoryCallable - """Function that returns a new BaseChatMessageHistory. + """Function that returns a new `BaseChatMessageHistory`. + This function should either take a single positional argument `session_id` of type - string and return a corresponding chat message history instance""" + string and return a corresponding chat message history instance + """ input_messages_key: str | None = None - """Must be specified if the base runnable accepts a dict as input. - The key in the input dict that contains the messages.""" + """Must be specified if the base `Runnable` accepts a `dict` as input. + The key in the input `dict` that contains the messages. + """ output_messages_key: str | None = None - """Must be specified if the base Runnable returns a dict as output. - The key in the output dict that contains the messages.""" + """Must be specified if the base `Runnable` returns a `dict` as output. + The key in the output `dict` that contains the messages. + """ history_messages_key: str | None = None - """Must be specified if the base runnable accepts a dict as input and expects a - separate key for historical messages.""" + """Must be specified if the base `Runnable` accepts a `dict` as input and expects a + separate key for historical messages. + """ history_factory_config: Sequence[ConfigurableFieldSpec] """Configure fields that should be passed to the chat history factory. - See `ConfigurableFieldSpec` for more details.""" + + See `ConfigurableFieldSpec` for more details. + """ def __init__( self, @@ -254,15 +261,16 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] history_factory_config: Sequence[ConfigurableFieldSpec] | None = None, **kwargs: Any, ) -> None: - """Initialize RunnableWithMessageHistory. + """Initialize `RunnableWithMessageHistory`. Args: - runnable: The base Runnable to be wrapped. + runnable: The base `Runnable` to be wrapped. + Must take as input one of: 1. A list of `BaseMessage` - 2. A dict with one key for all messages - 3. A dict with one key for the current input string/message(s) and + 2. A `dict` with one key for all messages + 3. A `dict` with one key for the current input string/message(s) and a separate key for historical messages. If the input key points to a string, it will be treated as a `HumanMessage` in history. @@ -270,13 +278,15 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] 1. A string which can be treated as an `AIMessage` 2. 
A `BaseMessage` or sequence of `BaseMessage` - 3. A dict with a key for a `BaseMessage` or sequence of + 3. A `dict` with a key for a `BaseMessage` or sequence of `BaseMessage` - get_session_history: Function that returns a new BaseChatMessageHistory. + get_session_history: Function that returns a new `BaseChatMessageHistory`. + This function should either take a single positional argument `session_id` of type string and return a corresponding chat message history instance. + ```python def get_session_history( session_id: str, *, user_id: str | None = None @@ -295,16 +305,17 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] ) -> BaseChatMessageHistory: ... ``` - input_messages_key: Must be specified if the base runnable accepts a dict - as input. Default is None. - output_messages_key: Must be specified if the base runnable returns a dict - as output. Default is None. - history_messages_key: Must be specified if the base runnable accepts a dict - as input and expects a separate key for historical messages. + input_messages_key: Must be specified if the base runnable accepts a `dict` + as input. + output_messages_key: Must be specified if the base runnable returns a `dict` + as output. + history_messages_key: Must be specified if the base runnable accepts a + `dict` as input and expects a separate key for historical messages. history_factory_config: Configure fields that should be passed to the chat history factory. See `ConfigurableFieldSpec` for more details. - Specifying these allows you to pass multiple config keys - into the get_session_history factory. + + Specifying these allows you to pass multiple config keys into the + `get_session_history` factory. **kwargs: Arbitrary additional kwargs to pass to parent class `RunnableBindingBase` init. @@ -364,7 +375,7 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] @property @override def config_specs(self) -> list[ConfigurableFieldSpec]: - """Get the configuration specs for the RunnableWithMessageHistory.""" + """Get the configuration specs for the `RunnableWithMessageHistory`.""" return get_unique_config_specs( super().config_specs + list(self.history_factory_config) ) @@ -606,6 +617,6 @@ class RunnableWithMessageHistory(RunnableBindingBase): # type: ignore[no-redef] def _get_parameter_names(callable_: GetSessionHistoryCallable) -> list[str]: - """Get the parameter names of the callable.""" + """Get the parameter names of the `Callable`.""" sig = inspect.signature(callable_) return list(sig.parameters.keys()) diff --git a/libs/core/langchain_core/runnables/passthrough.py b/libs/core/langchain_core/runnables/passthrough.py index 76a0aa2126a..740bcab8d24 100644 --- a/libs/core/langchain_core/runnables/passthrough.py +++ b/libs/core/langchain_core/runnables/passthrough.py @@ -51,10 +51,10 @@ def identity(x: Other) -> Other: """Identity function. Args: - x: input. + x: Input. Returns: - output. + Output. """ return x @@ -63,10 +63,10 @@ async def aidentity(x: Other) -> Other: """Async identity function. Args: - x: input. + x: Input. Returns: - output. + Output. """ return x @@ -74,11 +74,11 @@ async def aidentity(x: Other) -> Other: class RunnablePassthrough(RunnableSerializable[Other, Other]): """Runnable to passthrough inputs unchanged or with additional keys. 
- This Runnable behaves almost like the identity function, except that it + This `Runnable` behaves almost like the identity function, except that it can be configured to add additional keys to the output, if the input is a dict. - The examples below demonstrate this Runnable works using a few simple + The examples below demonstrate this `Runnable` works using a few simple chains. The chains rely on simple lambdas to make the examples easy to execute and experiment with. @@ -164,7 +164,7 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]): input_type: type[Other] | None = None, **kwargs: Any, ) -> None: - """Create e RunnablePassthrough. + """Create a `RunnablePassthrough`. Args: func: Function to be called with the input. @@ -180,12 +180,12 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -213,11 +213,11 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]): """Merge the Dict input with the output produced by the mapping argument. Args: - **kwargs: Runnable, Callable or a Mapping from keys to Runnables - or Callables. + **kwargs: `Runnable`, `Callable` or a `Mapping` from keys to `Runnable` + objects or `Callable`s. Returns: - A Runnable that merges the Dict input with the output produced by the + A `Runnable` that merges the `dict` input with the output produced by the mapping argument. """ return RunnableAssign(RunnableParallel[dict[str, Any]](kwargs)) @@ -350,7 +350,7 @@ _graph_passthrough: RunnablePassthrough = RunnablePassthrough() class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]): - """Runnable that assigns key-value pairs to dict[str, Any] inputs. + """Runnable that assigns key-value pairs to `dict[str, Any]` inputs. The `RunnableAssign` class takes input dictionaries and, through a `RunnableParallel` instance, applies transformations, then combines @@ -392,7 +392,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]): mapper: RunnableParallel def __init__(self, mapper: RunnableParallel[dict[str, Any]], **kwargs: Any) -> None: - """Create a RunnableAssign. + """Create a `RunnableAssign`. Args: mapper: A `RunnableParallel` instance that will be used to transform the @@ -403,13 +403,13 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` @@ -668,13 +668,19 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]): yield chunk -class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]): - """Runnable that picks keys from dict[str, Any] inputs. +class RunnablePick(RunnableSerializable[dict[str, Any], Any]): + """`Runnable` that picks keys from `dict[str, Any]` inputs. 
- RunnablePick class represents a Runnable that selectively picks keys from a + `RunnablePick` class represents a `Runnable` that selectively picks keys from a dictionary input. It allows you to specify one or more keys to extract - from the input dictionary. It returns a new dictionary containing only - the selected keys. + from the input dictionary. + + !!! note "Return Type Behavior" + The return type depends on the `keys` parameter: + + - When `keys` is a `str`: Returns the single value associated with that key + - When `keys` is a `list`: Returns a dictionary containing only the selected + keys Example: ```python @@ -687,18 +693,22 @@ class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]): "country": "USA", } - runnable = RunnablePick(keys=["name", "age"]) + # Single key - returns the value directly + runnable_single = RunnablePick(keys="name") + result_single = runnable_single.invoke(input_data) + print(result_single) # Output: "John" - output_data = runnable.invoke(input_data) - - print(output_data) # Output: {'name': 'John', 'age': 30} + # Multiple keys - returns a dictionary + runnable_multiple = RunnablePick(keys=["name", "age"]) + result_multiple = runnable_multiple.invoke(input_data) + print(result_multiple) # Output: {'name': 'John', 'age': 30} ``` """ keys: str | list[str] def __init__(self, keys: str | list[str], **kwargs: Any) -> None: - """Create a RunnablePick. + """Create a `RunnablePick`. Args: keys: A single key or a list of keys to pick from the input dictionary. @@ -708,13 +718,13 @@ class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` diff --git a/libs/core/langchain_core/runnables/retry.py b/libs/core/langchain_core/runnables/retry.py index ed0adf8eaf7..03466718274 100644 --- a/libs/core/langchain_core/runnables/retry.py +++ b/libs/core/langchain_core/runnables/retry.py @@ -126,7 +126,7 @@ class RunnableRetry(RunnableBindingBase[Input, Output]): # type: ignore[no-rede exponential_jitter_params: ExponentialJitterParams | None = None """Parameters for `tenacity.wait_exponential_jitter`. Namely: `initial`, - `max`, `exp_base`, and `jitter` (all float values). + `max`, `exp_base`, and `jitter` (all `float` values). """ max_attempt_number: int = 3 diff --git a/libs/core/langchain_core/runnables/router.py b/libs/core/langchain_core/runnables/router.py index 89b500d5e09..d9b4d44b8c3 100644 --- a/libs/core/langchain_core/runnables/router.py +++ b/libs/core/langchain_core/runnables/router.py @@ -40,11 +40,11 @@ class RouterInput(TypedDict): key: str """The key to route on.""" input: Any - """The input to pass to the selected Runnable.""" + """The input to pass to the selected `Runnable`.""" class RouterRunnable(RunnableSerializable[RouterInput, Output]): - """Runnable that routes to a set of Runnables based on Input['key']. + """`Runnable` that routes to a set of `Runnable` based on `Input['key']`. Returns the output of the selected Runnable. @@ -74,10 +74,10 @@ class RouterRunnable(RunnableSerializable[RouterInput, Output]): self, runnables: Mapping[str, Runnable[Any, Output] | Callable[[Any], Output]], ) -> None: - """Create a RouterRunnable. 
+ """Create a `RouterRunnable`. Args: - runnables: A mapping of keys to Runnables. + runnables: A mapping of keys to `Runnable` objects. """ super().__init__( runnables={key: coerce_to_runnable(r) for key, r in runnables.items()} @@ -90,13 +90,13 @@ class RouterRunnable(RunnableSerializable[RouterInput, Output]): @classmethod @override def is_lc_serializable(cls) -> bool: - """Return True as this class is serializable.""" + """Return `True` as this class is serializable.""" return True @classmethod @override def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object. + """Get the namespace of the LangChain object. Returns: `["langchain", "schema", "runnable"]` diff --git a/libs/core/langchain_core/runnables/schema.py b/libs/core/langchain_core/runnables/schema.py index 828085fd435..919b7db4108 100644 --- a/libs/core/langchain_core/runnables/schema.py +++ b/libs/core/langchain_core/runnables/schema.py @@ -1,4 +1,4 @@ -"""Module contains typedefs that are used with Runnables.""" +"""Module contains typedefs that are used with `Runnable` objects.""" from __future__ import annotations @@ -14,43 +14,43 @@ class EventData(TypedDict, total=False): """Data associated with a streaming event.""" input: Any - """The input passed to the Runnable that generated the event. + """The input passed to the `Runnable` that generated the event. - Inputs will sometimes be available at the *START* of the Runnable, and - sometimes at the *END* of the Runnable. + Inputs will sometimes be available at the *START* of the `Runnable`, and + sometimes at the *END* of the `Runnable`. - If a Runnable is able to stream its inputs, then its input by definition - won't be known until the *END* of the Runnable when it has finished streaming + If a `Runnable` is able to stream its inputs, then its input by definition + won't be known until the *END* of the `Runnable` when it has finished streaming its inputs. """ error: NotRequired[BaseException] - """The error that occurred during the execution of the Runnable. + """The error that occurred during the execution of the `Runnable`. - This field is only available if the Runnable raised an exception. + This field is only available if the `Runnable` raised an exception. - !!! version-added "Added in version 1.0.0" + !!! version-added "Added in `langchain-core` 1.0.0" """ output: Any - """The output of the Runnable that generated the event. + """The output of the `Runnable` that generated the event. - Outputs will only be available at the *END* of the Runnable. + Outputs will only be available at the *END* of the `Runnable`. - For most Runnables, this field can be inferred from the `chunk` field, - though there might be some exceptions for special cased Runnables (e.g., like + For most `Runnable` objects, this field can be inferred from the `chunk` field, + though there might be some exceptions for special a cased `Runnable` (e.g., like chat models), which may return more information. """ chunk: Any """A streaming chunk from the output that generated the event. chunks support addition in general, and adding them up should result - in the output of the Runnable that generated the event. + in the output of the `Runnable` that generated the event. """ class BaseStreamEvent(TypedDict): """Streaming event. - Schema of a streaming event which is produced from the astream_events method. + Schema of a streaming event which is produced from the `astream_events` method. 
 Example: ```python @@ -65,7 +65,7 @@ class BaseStreamEvent(TypedDict): events = [event async for event in chain.astream_events("hello")] - # will produce the following events + # Will produce the following events # (where some fields have been omitted for brevity): [ { @@ -94,45 +94,45 @@ """ event: str - """Event names are of the format: on_[runnable_type]_(start|stream|end). + """Event names are of the format: `on_[runnable_type]_(start|stream|end)`. Runnable types are one of: - **llm** - used by non chat models - **chat_model** - used by chat models - - **prompt** -- e.g., ChatPromptTemplate - - **tool** -- from tools defined via @tool decorator or inheriting - from Tool/BaseTool - - **chain** - most Runnables are of this type + - **prompt** -- e.g., `ChatPromptTemplate` + - **tool** -- from tools defined via `@tool` decorator or inheriting + from `Tool`/`BaseTool` + - **chain** - most `Runnable` objects are of this type Further, the events are categorized as one of: - - **start** - when the Runnable starts - - **stream** - when the Runnable is streaming - - **end* - when the Runnable ends + - **start** - when the `Runnable` starts + - **stream** - when the `Runnable` is streaming + - **end** - when the `Runnable` ends start, stream and end are associated with slightly different `data` payload. Please see the documentation for `EventData` for more details. """ run_id: str - """An randomly generated ID to keep track of the execution of the given Runnable. + """A randomly generated ID to keep track of the execution of the given `Runnable`. - Each child Runnable that gets invoked as part of the execution of a parent Runnable - is assigned its own unique ID. + Each child `Runnable` that gets invoked as part of the execution of a parent + `Runnable` is assigned its own unique ID. """ tags: NotRequired[list[str]] - """Tags associated with the Runnable that generated this event. + """Tags associated with the `Runnable` that generated this event. - Tags are always inherited from parent Runnables. + Tags are always inherited from parent `Runnable` objects. - Tags can either be bound to a Runnable using `.with_config({"tags": ["hello"]})` + Tags can either be bound to a `Runnable` using `.with_config({"tags": ["hello"]})` or passed at run time using `.astream_events(..., {"tags": ["hello"]})`. """ metadata: NotRequired[dict[str, Any]] - """Metadata associated with the Runnable that generated this event. + """Metadata associated with the `Runnable` that generated this event. - Metadata can either be bound to a Runnable using + Metadata can either be bound to a `Runnable` using `.with_config({"metadata": { "foo": "bar" }})` @@ -146,8 +146,8 @@ class BaseStreamEvent(TypedDict): Root Events will have an empty list. - For example, if a Runnable A calls Runnable B, then the event generated by Runnable - B will have Runnable A's ID in the parent_ids field. + For example, if a `Runnable` A calls `Runnable` B, then the event generated by + `Runnable` B will have `Runnable` A's ID in the `parent_ids` field. The order of the parent IDs is from the root parent to the immediate parent. @@ -164,14 +164,11 @@ class StandardStreamEvent(BaseStreamEvent): The contents of the event data depend on the event type. """ name: str - """The name of the Runnable that generated the event.""" + """The name of the `Runnable` that generated the event.""" class CustomStreamEvent(BaseStreamEvent): - """Custom stream event created by the user. - - !!! 
version-added "Added in version 0.2.15" - """ + """Custom stream event created by the user.""" # Overwrite the event field to be more specific. event: Literal["on_custom_event"] # type: ignore[misc] diff --git a/libs/core/langchain_core/runnables/utils.py b/libs/core/langchain_core/runnables/utils.py index 51318bee13d..4860e70bc2c 100644 --- a/libs/core/langchain_core/runnables/utils.py +++ b/libs/core/langchain_core/runnables/utils.py @@ -5,6 +5,7 @@ from __future__ import annotations import ast import asyncio import inspect +import sys import textwrap from collections.abc import Callable, Mapping, Sequence from contextvars import Context @@ -80,7 +81,7 @@ def accepts_run_manager(callable: Callable[..., Any]) -> bool: # noqa: A002 callable: The callable to check. Returns: - True if the callable accepts a run_manager argument, False otherwise. + `True` if the callable accepts a run_manager argument, `False` otherwise. """ try: return signature(callable).parameters.get("run_manager") is not None @@ -95,7 +96,7 @@ def accepts_config(callable: Callable[..., Any]) -> bool: # noqa: A002 callable: The callable to check. Returns: - True if the callable accepts a config argument, False otherwise. + `True` if the callable accepts a config argument, `False` otherwise. """ try: return signature(callable).parameters.get("config") is not None @@ -110,7 +111,7 @@ def accepts_context(callable: Callable[..., Any]) -> bool: # noqa: A002 callable: The callable to check. Returns: - True if the callable accepts a context argument, False otherwise. + `True` if the callable accepts a context argument, `False` otherwise. """ try: return signature(callable).parameters.get("context") is not None @@ -118,14 +119,13 @@ def accepts_context(callable: Callable[..., Any]) -> bool: # noqa: A002 return False -@lru_cache(maxsize=1) def asyncio_accepts_context() -> bool: - """Cache the result of checking if asyncio.create_task accepts a `context` arg. + """Check if asyncio.create_task accepts a `context` arg. Returns: - True if `asyncio.create_task` accepts a context argument, False otherwise. + True if `asyncio.create_task` accepts a context argument, `False` otherwise. """ - return accepts_context(asyncio.create_task) + return sys.version_info >= (3, 11) def coro_with_context( @@ -136,7 +136,7 @@ def coro_with_context( Args: coro: The coroutine to await. context: The context to use. - create_task: Whether to create a task. Defaults to `False`. + create_task: Whether to create a task. Returns: The coroutine with the context. @@ -552,13 +552,13 @@ class ConfigurableField(NamedTuple): id: str """The unique identifier of the field.""" name: str | None = None - """The name of the field. Defaults to `None`.""" + """The name of the field. """ description: str | None = None - """The description of the field. Defaults to `None`.""" + """The description of the field. """ annotation: Any | None = None - """The annotation of the field. Defaults to `None`.""" + """The annotation of the field. """ is_shared: bool = False - """Whether the field is shared. Defaults to `False`.""" + """Whether the field is shared.""" @override def __hash__(self) -> int: @@ -575,11 +575,11 @@ class ConfigurableFieldSingleOption(NamedTuple): default: str """The default value for the field.""" name: str | None = None - """The name of the field. Defaults to `None`.""" + """The name of the field. """ description: str | None = None - """The description of the field. Defaults to `None`.""" + """The description of the field. 
""" is_shared: bool = False - """Whether the field is shared. Defaults to `False`.""" + """Whether the field is shared.""" @override def __hash__(self) -> int: @@ -596,11 +596,11 @@ class ConfigurableFieldMultiOption(NamedTuple): default: Sequence[str] """The default values for the field.""" name: str | None = None - """The name of the field. Defaults to `None`.""" + """The name of the field. """ description: str | None = None - """The description of the field. Defaults to `None`.""" + """The description of the field. """ is_shared: bool = False - """Whether the field is shared. Defaults to `False`.""" + """Whether the field is shared.""" @override def __hash__(self) -> int: @@ -620,15 +620,15 @@ class ConfigurableFieldSpec(NamedTuple): annotation: Any """The annotation of the field.""" name: str | None = None - """The name of the field. Defaults to `None`.""" + """The name of the field. """ description: str | None = None - """The description of the field. Defaults to `None`.""" + """The description of the field. """ default: Any = None - """The default value for the field. Defaults to `None`.""" + """The default value for the field. """ is_shared: bool = False - """Whether the field is shared. Defaults to `False`.""" + """Whether the field is shared.""" dependencies: list[str] | None = None - """The dependencies of the field. Defaults to `None`.""" + """The dependencies of the field. """ def get_unique_config_specs( @@ -727,7 +727,7 @@ def is_async_generator( func: The function to check. Returns: - True if the function is an async generator, False otherwise. + `True` if the function is an async generator, `False` otherwise. """ return inspect.isasyncgenfunction(func) or ( hasattr(func, "__call__") # noqa: B004 @@ -744,7 +744,7 @@ def is_async_callable( func: The function to check. Returns: - True if the function is async, False otherwise. + `True` if the function is async, `False` otherwise. """ return asyncio.iscoroutinefunction(func) or ( hasattr(func, "__call__") # noqa: B004 diff --git a/libs/core/langchain_core/stores.py b/libs/core/langchain_core/stores.py index 2c570b66580..77408d60962 100644 --- a/libs/core/langchain_core/stores.py +++ b/libs/core/langchain_core/stores.py @@ -86,7 +86,7 @@ class BaseStore(ABC, Generic[K, V]): Returns: A sequence of optional values associated with the keys. - If a key is not found, the corresponding value will be None. + If a key is not found, the corresponding value will be `None`. """ async def amget(self, keys: Sequence[K]) -> list[V | None]: @@ -97,7 +97,7 @@ class BaseStore(ABC, Generic[K, V]): Returns: A sequence of optional values associated with the keys. - If a key is not found, the corresponding value will be None. + If a key is not found, the corresponding value will be `None`. """ return await run_in_executor(None, self.mget, keys) @@ -209,7 +209,7 @@ class InMemoryBaseStore(BaseStore[str, V], Generic[V]): """Get an iterator over keys that match the given prefix. Args: - prefix: The prefix to match. Defaults to `None`. + prefix: The prefix to match. Yields: The keys that match the given prefix. @@ -225,7 +225,7 @@ class InMemoryBaseStore(BaseStore[str, V], Generic[V]): """Async get an async iterator over keys that match the given prefix. Args: - prefix: The prefix to match. Defaults to `None`. + prefix: The prefix to match. Yields: The keys that match the given prefix. @@ -243,8 +243,7 @@ class InMemoryStore(InMemoryBaseStore[Any]): """In-memory store for any type of data. 
Attributes: - store (dict[str, Any]): The underlying dictionary that stores - the key-value pairs. + store: The underlying dictionary that stores the key-value pairs. Examples: ```python @@ -267,8 +266,7 @@ class InMemoryByteStore(InMemoryBaseStore[bytes]): """In-memory store for bytes. Attributes: - store (dict[str, bytes]): The underlying dictionary that stores - the key-value pairs. + store: The underlying dictionary that stores the key-value pairs. Examples: ```python diff --git a/libs/core/langchain_core/sys_info.py b/libs/core/langchain_core/sys_info.py index ac83ad7d42e..86716af4714 100644 --- a/libs/core/langchain_core/sys_info.py +++ b/libs/core/langchain_core/sys_info.py @@ -125,9 +125,11 @@ def print_sys_info(*, additional_pkgs: Sequence[str] = ()) -> None: for dep in sub_dependencies: try: dep_version = metadata.version(dep) - print(f"> {dep}: {dep_version}") except Exception: - print(f"> {dep}: Installed. No version info available.") + dep_version = None + + if dep_version is not None: + print(f"> {dep}: {dep_version}") if __name__ == "__main__": diff --git a/libs/core/langchain_core/tools/base.py b/libs/core/langchain_core/tools/base.py index adeb49e6150..64648273e0f 100644 --- a/libs/core/langchain_core/tools/base.py +++ b/libs/core/langchain_core/tools/base.py @@ -92,7 +92,7 @@ def _is_annotated_type(typ: type[Any]) -> bool: typ: The type to check. Returns: - True if the type is an Annotated type, False otherwise. + `True` if the type is an Annotated type, `False` otherwise. """ return get_origin(typ) is typing.Annotated @@ -226,7 +226,7 @@ def _is_pydantic_annotation(annotation: Any, pydantic_version: str = "v2") -> bo pydantic_version: The Pydantic version to check against ("v1" or "v2"). Returns: - True if the annotation is a Pydantic model, False otherwise. + `True` if the annotation is a Pydantic model, `False` otherwise. """ base_model_class = BaseModelV1 if pydantic_version == "v1" else BaseModel try: @@ -245,7 +245,7 @@ def _function_annotations_are_pydantic_v1( func: The function being checked. Returns: - True if all Pydantic annotations are from V1, False otherwise. + True if all Pydantic annotations are from V1, `False` otherwise. Raises: NotImplementedError: If the function contains mixed V1 and V2 annotations. @@ -285,18 +285,17 @@ def create_schema_from_function( error_on_invalid_docstring: bool = False, include_injected: bool = True, ) -> type[BaseModel]: - """Create a pydantic schema from a function's signature. + """Create a Pydantic schema from a function's signature. Args: - model_name: Name to assign to the generated pydantic schema. + model_name: Name to assign to the generated Pydantic schema. func: Function to generate the schema from. filter_args: Optional list of arguments to exclude from the schema. - Defaults to FILTERED_ARGS. + Defaults to `FILTERED_ARGS`. parse_docstring: Whether to parse the function's docstring for descriptions - for each argument. Defaults to `False`. + for each argument. error_on_invalid_docstring: if `parse_docstring` is provided, configure - whether to raise ValueError on invalid Google Style docstrings. - Defaults to `False`. + whether to raise `ValueError` on invalid Google Style docstrings. include_injected: Whether to include injected arguments in the schema. Defaults to `True`, since we want to include them in the schema when *validating* tool inputs. 
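The `create_schema_from_function` docstring above describes how a Pydantic argument schema is inferred from a plain Python function. Below is a minimal sketch of exercising that inference directly; the import path and keyword names are taken from the hunk above, while the `multiply` function and the Pydantic v2 `model_json_schema()` call are illustrative assumptions, not part of this diff.

```python
# Illustrative sketch only (not part of this PR): build an argument schema
# from a function signature and its Google-style docstring, per the
# parameters documented in the hunk above.
from langchain_core.tools.base import create_schema_from_function


def multiply(a: int, b: int = 2) -> int:
    """Multiply two integers.

    Args:
        a: First factor.
        b: Second factor.
    """
    return a * b


# parse_docstring=True pulls the per-argument descriptions into the schema.
ArgsSchema = create_schema_from_function("multiply", multiply, parse_docstring=True)
print(ArgsSchema.model_json_schema())  # JSON schema with fields `a` and `b`
```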
@@ -312,7 +311,7 @@ def create_schema_from_function( # https://docs.pydantic.dev/latest/usage/validation_decorator/ with warnings.catch_warnings(): # We are using deprecated functionality here. - # This code should be re-written to simply construct a pydantic model + # This code should be re-written to simply construct a Pydantic model # using inspect.signature and create_model. warnings.simplefilter("ignore", category=PydanticDeprecationWarning) validated = validate_arguments(func, config=_SchemaConfig) # type: ignore[operator] @@ -392,6 +391,7 @@ class BaseTool(RunnableSerializable[str | dict | ToolCall, Any]): """Base class for all LangChain tools. This abstract class defines the interface that all LangChain tools must implement. + Tools are components that can be called by agents to perform specific actions. """ @@ -402,7 +402,7 @@ class BaseTool(RunnableSerializable[str | dict | ToolCall, Any]): **kwargs: Additional keyword arguments passed to the parent class. Raises: - SchemaAnnotationError: If args_schema has incorrect type annotation. + SchemaAnnotationError: If `args_schema` has incorrect type annotation. """ super().__init_subclass__(**kwargs) @@ -443,15 +443,15 @@ class ChildTool(BaseTool): Args schema should be either: - - A subclass of pydantic.BaseModel. - - A subclass of pydantic.v1.BaseModel if accessing v1 namespace in pydantic 2 - - a JSON schema dict + - A subclass of `pydantic.BaseModel`. + - A subclass of `pydantic.v1.BaseModel` if accessing v1 namespace in pydantic 2 + - A JSON schema dict """ return_direct: bool = False """Whether to return the tool's output directly. - Setting this to True means - that after the tool is called, the AgentExecutor will stop looping. + Setting this to `True` means that after the tool is called, the `AgentExecutor` will + stop looping. """ verbose: bool = False """Whether to log the tool's progress.""" @@ -460,32 +460,38 @@ class ChildTool(BaseTool): """Callbacks to be called during tool execution.""" tags: list[str] | None = None - """Optional list of tags associated with the tool. Defaults to `None`. + """Optional list of tags associated with the tool. + These tags will be associated with each call to this tool, and passed as arguments to the handlers defined in `callbacks`. - You can use these to eg identify a specific instance of a tool with its use case. + + You can use these to, e.g., identify a specific instance of a tool with its use + case. """ metadata: dict[str, Any] | None = None - """Optional metadata associated with the tool. Defaults to `None`. + """Optional metadata associated with the tool. + This metadata will be associated with each call to this tool, and passed as arguments to the handlers defined in `callbacks`. - You can use these to eg identify a specific instance of a tool with its use case. + + You can use these to, e.g., identify a specific instance of a tool with its use + case. """ handle_tool_error: bool | str | Callable[[ToolException], str] | None = False - """Handle the content of the ToolException thrown.""" + """Handle the content of the `ToolException` thrown.""" handle_validation_error: ( bool | str | Callable[[ValidationError | ValidationErrorV1], str] | None ) = False - """Handle the content of the ValidationError thrown.""" + """Handle the content of the `ValidationError` thrown.""" response_format: Literal["content", "content_and_artifact"] = "content" - """The tool response format. Defaults to 'content'. + """The tool response format. 
- If "content" then the output of the tool is interpreted as the contents of a - ToolMessage. If "content_and_artifact" then the output is expected to be a - two-tuple corresponding to the (content, artifact) of a ToolMessage. + If `'content'` then the output of the tool is interpreted as the contents of a + `ToolMessage`. If `'content_and_artifact'` then the output is expected to be a + two-tuple corresponding to the `(content, artifact)` of a `ToolMessage`. """ def __init__(self, **kwargs: Any) -> None: @@ -493,7 +499,7 @@ class ChildTool(BaseTool): Raises: TypeError: If `args_schema` is not a subclass of pydantic `BaseModel` or - dict. + `dict`. """ if ( "args_schema" in kwargs @@ -517,7 +523,7 @@ class ChildTool(BaseTool): """Check if the tool accepts only a single input argument. Returns: - True if the tool has only one input argument, False otherwise. + `True` if the tool has only one input argument, `False` otherwise. """ keys = {k for k in self.args if k != "kwargs"} return len(keys) == 1 @@ -527,7 +533,7 @@ class ChildTool(BaseTool): """Get the tool's input arguments schema. Returns: - Dictionary containing the tool's argument properties. + `dict` containing the tool's argument properties. """ if isinstance(self.args_schema, dict): json_schema = self.args_schema @@ -616,10 +622,10 @@ class ChildTool(BaseTool): The parsed and validated input. Raises: - ValueError: If string input is provided with JSON schema `args_schema`. - ValueError: If InjectedToolCallId is required but `tool_call_id` is not + ValueError: If `string` input is provided with JSON schema `args_schema`. + ValueError: If `InjectedToolCallId` is required but `tool_call_id` is not provided. - TypeError: If args_schema is not a Pydantic `BaseModel` or dict. + TypeError: If `args_schema` is not a Pydantic `BaseModel` or dict. """ input_args = self.args_schema if isinstance(tool_input, str): @@ -708,6 +714,35 @@ class ChildTool(BaseTool): kwargs["run_manager"] = kwargs["run_manager"].get_sync() return await run_in_executor(None, self._run, *args, **kwargs) + def _filter_injected_args(self, tool_input: dict) -> dict: + """Filter out injected tool arguments from the input dictionary. + + Injected arguments are those annotated with `InjectedToolArg` or its + subclasses, or arguments in `FILTERED_ARGS` like `run_manager` and callbacks. + + Args: + tool_input: The tool input dictionary to filter. + + Returns: + A filtered dictionary with injected arguments removed. + """ + # Start with filtered args from the constant + filtered_keys = set[str](FILTERED_ARGS) + + # If we have an args_schema, use it to identify injected args + if self.args_schema is not None: + try: + annotations = get_all_basemodel_annotations(self.args_schema) + for field_name, field_type in annotations.items(): + if _is_injected_arg_type(field_type): + filtered_keys.add(field_name) + except Exception: # noqa: S110 + # If we can't get annotations, just use FILTERED_ARGS + pass + + # Filter out the injected keys from tool_input + return {k: v for k, v in tool_input.items() if k not in filtered_keys} + def _to_args_and_kwargs( self, tool_input: str | dict, tool_call_id: str | None ) -> tuple[tuple, dict]: @@ -718,7 +753,7 @@ class ChildTool(BaseTool): tool_call_id: The ID of the tool call, if available. Returns: - A tuple of (positional_args, keyword_args) for the tool. + A tuple of `(positional_args, keyword_args)` for the tool. Raises: TypeError: If the tool input type is invalid. 
@@ -767,16 +802,16 @@ class ChildTool(BaseTool): Args: tool_input: The input to the tool. - verbose: Whether to log the tool's progress. Defaults to `None`. - start_color: The color to use when starting the tool. Defaults to 'green'. - color: The color to use when ending the tool. Defaults to 'green'. - callbacks: Callbacks to be called during tool execution. Defaults to `None`. - tags: Optional list of tags associated with the tool. Defaults to `None`. - metadata: Optional metadata associated with the tool. Defaults to `None`. - run_name: The name of the run. Defaults to `None`. - run_id: The id of the run. Defaults to `None`. - config: The configuration for the tool. Defaults to `None`. - tool_call_id: The id of the tool call. Defaults to `None`. + verbose: Whether to log the tool's progress. + start_color: The color to use when starting the tool. + color: The color to use when ending the tool. + callbacks: Callbacks to be called during tool execution. + tags: Optional list of tags associated with the tool. + metadata: Optional metadata associated with the tool. + run_name: The name of the run. + run_id: The id of the run. + config: The configuration for the tool. + tool_call_id: The id of the tool call. **kwargs: Keyword arguments to be passed to tool callbacks (event handler) Returns: @@ -795,17 +830,29 @@ class ChildTool(BaseTool): self.metadata, ) + # Filter out injected arguments from callback inputs + filtered_tool_input = ( + self._filter_injected_args(tool_input) + if isinstance(tool_input, dict) + else None + ) + + # Use filtered inputs for the input_str parameter as well + tool_input_str = ( + tool_input + if isinstance(tool_input, str) + else str( + filtered_tool_input if filtered_tool_input is not None else tool_input + ) + ) + run_manager = callback_manager.on_tool_start( {"name": self.name, "description": self.description}, - tool_input if isinstance(tool_input, str) else str(tool_input), + tool_input_str, color=start_color, name=run_name, run_id=run_id, - # Inputs by definition should always be dicts. - # For now, it's unclear whether this assumption is ever violated, - # but if it is we will send a `None` value to the callback instead - # TODO: will need to address issue via a patch. - inputs=tool_input if isinstance(tool_input, dict) else None, + inputs=filtered_tool_input, **kwargs, ) @@ -825,16 +872,19 @@ class ChildTool(BaseTool): tool_kwargs |= {config_param: config} response = context.run(self._run, *tool_args, **tool_kwargs) if self.response_format == "content_and_artifact": - if not isinstance(response, tuple) or len(response) != 2: - msg = ( - "Since response_format='content_and_artifact' " - "a two-tuple of the message content and raw tool output is " - f"expected. Instead generated response of type: " - f"{type(response)}." - ) + msg = ( + "Since response_format='content_and_artifact' " + "a two-tuple of the message content and raw tool output is " + f"expected. Instead, generated response is of type: " + f"{type(response)}." + ) + if not isinstance(response, tuple): error_to_raise = ValueError(msg) else: - content, artifact = response + try: + content, artifact = response + except ValueError: + error_to_raise = ValueError(msg) else: content = response except (ValidationError, ValidationErrorV1) as e: @@ -879,16 +929,16 @@ class ChildTool(BaseTool): Args: tool_input: The input to the tool. - verbose: Whether to log the tool's progress. Defaults to `None`. - start_color: The color to use when starting the tool. Defaults to 'green'. 
- color: The color to use when ending the tool. Defaults to 'green'. - callbacks: Callbacks to be called during tool execution. Defaults to `None`. - tags: Optional list of tags associated with the tool. Defaults to `None`. - metadata: Optional metadata associated with the tool. Defaults to `None`. - run_name: The name of the run. Defaults to `None`. - run_id: The id of the run. Defaults to `None`. - config: The configuration for the tool. Defaults to `None`. - tool_call_id: The id of the tool call. Defaults to `None`. + verbose: Whether to log the tool's progress. + start_color: The color to use when starting the tool. + color: The color to use when ending the tool. + callbacks: Callbacks to be called during tool execution. + tags: Optional list of tags associated with the tool. + metadata: Optional metadata associated with the tool. + run_name: The name of the run. + run_id: The id of the run. + config: The configuration for the tool. + tool_call_id: The id of the tool call. **kwargs: Keyword arguments to be passed to tool callbacks Returns: @@ -906,17 +956,30 @@ class ChildTool(BaseTool): metadata, self.metadata, ) + + # Filter out injected arguments from callback inputs + filtered_tool_input = ( + self._filter_injected_args(tool_input) + if isinstance(tool_input, dict) + else None + ) + + # Use filtered inputs for the input_str parameter as well + tool_input_str = ( + tool_input + if isinstance(tool_input, str) + else str( + filtered_tool_input if filtered_tool_input is not None else tool_input + ) + ) + run_manager = await callback_manager.on_tool_start( {"name": self.name, "description": self.description}, - tool_input if isinstance(tool_input, str) else str(tool_input), + tool_input_str, color=start_color, name=run_name, run_id=run_id, - # Inputs by definition should always be dicts. - # For now, it's unclear whether this assumption is ever violated, - # but if it is we will send a `None` value to the callback instead - # TODO: will need to address issue via a patch. - inputs=tool_input if isinstance(tool_input, dict) else None, + inputs=filtered_tool_input, **kwargs, ) content = None @@ -938,16 +1001,19 @@ class ChildTool(BaseTool): coro = self._arun(*tool_args, **tool_kwargs) response = await coro_with_context(coro, context) if self.response_format == "content_and_artifact": - if not isinstance(response, tuple) or len(response) != 2: - msg = ( - "Since response_format='content_and_artifact' " - "a two-tuple of the message content and raw tool output is " - f"expected. Instead generated response of type: " - f"{type(response)}." - ) + msg = ( + "Since response_format='content_and_artifact' " + "a two-tuple of the message content and raw tool output is " + f"expected. Instead, generated response is of type: " + f"{type(response)}." + ) + if not isinstance(response, tuple): error_to_raise = ValueError(msg) else: - content, artifact = response + try: + content, artifact = response + except ValueError: + error_to_raise = ValueError(msg) else: content = response except ValidationError as e: @@ -981,7 +1047,7 @@ def _is_tool_call(x: Any) -> bool: x: The input to check. Returns: - True if the input is a tool call, False otherwise. + `True` if the input is a tool call, `False` otherwise. """ return isinstance(x, dict) and x.get("type") == "tool_call" @@ -995,7 +1061,7 @@ def _handle_validation_error( Args: e: The validation error that occurred. - flag: How to handle the error (bool, string, or callable). + flag: How to handle the error (`bool`, `str`, or `Callable`). 
Returns: The error message to return. @@ -1027,7 +1093,7 @@ def _handle_tool_error( Args: e: The tool exception that occurred. - flag: How to handle the error (bool, string, or callable). + flag: How to handle the error (`bool`, `str`, or `Callable`). Returns: The error message to return. @@ -1058,12 +1124,12 @@ def _prep_run_args( """Prepare arguments for tool execution. Args: - value: The input value (string, dict, or ToolCall). + value: The input value (`str`, `dict`, or `ToolCall`). config: The runnable configuration. **kwargs: Additional keyword arguments. Returns: - A tuple of (tool_input, run_kwargs). + A tuple of `(tool_input, run_kwargs)`. """ config = ensure_config(config) if _is_tool_call(value): @@ -1094,7 +1160,7 @@ def _format_output( name: str, status: str, ) -> ToolOutputMixin | Any: - """Format tool output as a ToolMessage if appropriate. + """Format tool output as a `ToolMessage` if appropriate. Args: content: The main content of the tool output. @@ -1104,7 +1170,7 @@ def _format_output( status: The execution status. Returns: - The formatted output, either as a ToolMessage or the original content. + The formatted output, either as a `ToolMessage` or the original content. """ if isinstance(content, ToolOutputMixin) or tool_call_id is None: return content @@ -1128,7 +1194,7 @@ def _is_message_content_type(obj: Any) -> bool: obj: The object to check. Returns: - True if the object is valid message content, False otherwise. + `True` if the object is valid message content, `False` otherwise. """ return isinstance(obj, str) or ( isinstance(obj, list) and all(_is_message_content_block(e) for e in obj) @@ -1144,7 +1210,7 @@ def _is_message_content_block(obj: Any) -> bool: obj: The object to check. Returns: - True if the object is a valid content block, False otherwise. + `True` if the object is a valid content block, `False` otherwise. """ if isinstance(obj, str): return True @@ -1175,7 +1241,7 @@ def _get_type_hints(func: Callable) -> dict[str, type] | None: func: The function to get type hints from. Returns: - Dictionary of type hints, or None if extraction fails. + `dict` of type hints, or `None` if extraction fails. """ if isinstance(func, functools.partial): func = func.func @@ -1186,13 +1252,13 @@ def _get_type_hints(func: Callable) -> dict[str, type] | None: def _get_runnable_config_param(func: Callable) -> str | None: - """Find the parameter name for RunnableConfig in a function. + """Find the parameter name for `RunnableConfig` in a function. Args: func: The function to check. Returns: - The parameter name for RunnableConfig, or None if not found. + The parameter name for `RunnableConfig`, or `None` if not found. """ type_hints = _get_type_hints(func) if not type_hints: @@ -1211,6 +1277,28 @@ class InjectedToolArg: """ +class _DirectlyInjectedToolArg: + """Annotation for tool arguments that are injected at runtime. + + Injected via direct type annotation, rather than annotated metadata. + + For example, `ToolRuntime` is a directly injected argument. + + Note the direct annotation rather than the verbose alternative: + `Annotated[ToolRuntime, InjectedRuntime]` + + ```python + from langchain_core.tools import tool, ToolRuntime + + + @tool + def foo(x: int, runtime: ToolRuntime) -> str: + # use runtime.state, runtime.context, runtime.store, etc. + ... + ``` + """ + + class InjectedToolCallId(InjectedToolArg): """Annotation for injecting the tool call ID. 
@@ -1238,6 +1326,24 @@ class InjectedToolCallId(InjectedToolArg): """ +def _is_directly_injected_arg_type(type_: Any) -> bool: + """Check if a type annotation indicates a directly injected argument. + + This is currently only used for `ToolRuntime`. + Checks if either the annotation itself is a subclass of `_DirectlyInjectedToolArg` + or the origin of the annotation is a subclass of `_DirectlyInjectedToolArg`. + + Ex: `ToolRuntime` or `ToolRuntime[ContextT, StateT]` would both return `True`. + """ + return ( + isinstance(type_, type) and issubclass(type_, _DirectlyInjectedToolArg) + ) or ( + (origin := get_origin(type_)) is not None + and isinstance(origin, type) + and issubclass(origin, _DirectlyInjectedToolArg) + ) + + def _is_injected_arg_type( type_: type | TypeVar, injected_type: type[InjectedToolArg] | None = None ) -> bool: @@ -1248,9 +1354,17 @@ def _is_injected_arg_type( injected_type: The specific injected type to check for. Returns: - True if the type is an injected argument, False otherwise. + `True` if the type is an injected argument, `False` otherwise. """ - injected_type = injected_type or InjectedToolArg + if injected_type is None: + # if no injected type is specified, + # check if the type is a directly injected argument + if _is_directly_injected_arg_type(type_): + return True + injected_type = InjectedToolArg + + # if the type is an Annotated type, check if annotated metadata + # is an instance or subclass of the injected type return any( isinstance(arg, injected_type) or (isinstance(arg, type) and issubclass(arg, injected_type)) @@ -1261,14 +1375,14 @@ def _is_injected_arg_type( def get_all_basemodel_annotations( cls: TypeBaseModel | Any, *, default_to_bound: bool = True ) -> dict[str, type | TypeVar]: - """Get all annotations from a Pydantic BaseModel and its parents. + """Get all annotations from a Pydantic `BaseModel` and its parents. Args: - cls: The Pydantic BaseModel class. - default_to_bound: Whether to default to the bound of a TypeVar if it exists. + cls: The Pydantic `BaseModel` class. + default_to_bound: Whether to default to the bound of a `TypeVar` if it exists. Returns: - A dictionary of field names to their type annotations. + `dict` of field names to their type annotations. """ # cls has no subscript: cls = FooBar if isinstance(cls, type): @@ -1334,15 +1448,15 @@ def _replace_type_vars( *, default_to_bound: bool = True, ) -> type | TypeVar: - """Replace TypeVars in a type annotation with concrete types. + """Replace `TypeVar`s in a type annotation with concrete types. Args: type_: The type annotation to process. - generic_map: Mapping of TypeVars to concrete types. - default_to_bound: Whether to use TypeVar bounds as defaults. + generic_map: Mapping of `TypeVar`s to concrete types. + default_to_bound: Whether to use `TypeVar` bounds as defaults. Returns: - The type with TypeVars replaced. + The type with `TypeVar`s replaced. """ generic_map = generic_map or {} if isinstance(type_, TypeVar): diff --git a/libs/core/langchain_core/tools/convert.py b/libs/core/langchain_core/tools/convert.py index 0bc4e2e98b6..1dabed5e002 100644 --- a/libs/core/langchain_core/tools/convert.py +++ b/libs/core/langchain_core/tools/convert.py @@ -81,61 +81,72 @@ def tool( parse_docstring: bool = False, error_on_invalid_docstring: bool = True, ) -> BaseTool | Callable[[Callable | Runnable], BaseTool]: - """Make tools out of functions, can be used with or without arguments. + """Convert Python functions and `Runnables` to LangChain tools. 
+ + Can be used as a decorator with or without arguments to create tools from functions. + + Functions can have any signature - the tool will automatically infer input schemas + unless disabled. + + !!! note "Requirements" + - Functions must have type hints for proper schema inference + - When `infer_schema=False`, functions must be `(str) -> str` and have + docstrings + - When using with `Runnable`, a string name must be provided Args: - name_or_callable: Optional name of the tool or the callable to be - converted to a tool. Must be provided as a positional argument. - runnable: Optional runnable to convert to a tool. Must be provided as a - positional argument. + name_or_callable: Optional name of the tool or the `Callable` to be + converted to a tool. Overrides the function's name. + + Must be provided as a positional argument. + runnable: Optional `Runnable` to convert to a tool. + + Must be provided as a positional argument. description: Optional description for the tool. + Precedence for the tool description value is as follows: - - `description` argument + - This `description` argument (used even if docstring and/or `args_schema` are provided) - - tool function docstring + - Tool function docstring (used even if `args_schema` is provided) - `args_schema` description - (used only if `description` / docstring are not provided) + (used only if `description` and docstring are not provided) *args: Extra positional arguments. Must be empty. - return_direct: Whether to return directly from the tool rather - than continuing the agent loop. Defaults to `False`. - args_schema: optional argument schema for user to specify. - Defaults to `None`. - infer_schema: Whether to infer the schema of the arguments from - the function's signature. This also makes the resultant tool - accept a dictionary input to its `run()` function. - Defaults to `True`. - response_format: The tool response format. If "content" then the output of - the tool is interpreted as the contents of a ToolMessage. If - "content_and_artifact" then the output is expected to be a two-tuple - corresponding to the (content, artifact) of a ToolMessage. - Defaults to "content". - parse_docstring: if `infer_schema` and `parse_docstring`, will attempt to + return_direct: Whether to return directly from the tool rather than continuing + the agent loop. + args_schema: Optional argument schema for user to specify. + infer_schema: Whether to infer the schema of the arguments from the function's + signature. This also makes the resultant tool accept a dictionary input to + its `run()` function. + response_format: The tool response format. + + If `'content'`, then the output of the tool is interpreted as the contents + of a `ToolMessage`. + + If `'content_and_artifact'`, then the output is expected to be a two-tuple + corresponding to the `(content, artifact)` of a `ToolMessage`. + parse_docstring: If `infer_schema` and `parse_docstring`, will attempt to parse parameter descriptions from Google Style function docstrings. - Defaults to `False`. - error_on_invalid_docstring: if `parse_docstring` is provided, configure - whether to raise ValueError on invalid Google Style docstrings. - Defaults to `True`. + error_on_invalid_docstring: If `parse_docstring` is provided, configure + whether to raise `ValueError` on invalid Google Style docstrings. Raises: - ValueError: If too many positional arguments are provided. - ValueError: If a runnable is provided without a string name. + ValueError: If too many positional arguments are provided (e.g. 
violating the + `*args` constraint). + ValueError: If a `Runnable` is provided without a string name. When using `tool` + with a `Runnable`, a `str` name must be provided as the `name_or_callable`. ValueError: If the first argument is not a string or callable with a `__name__` attribute. ValueError: If the function does not have a docstring and description - is not provided and `infer_schema` is False. - ValueError: If `parse_docstring` is True and the function has an invalid + is not provided and `infer_schema` is `False`. + ValueError: If `parse_docstring` is `True` and the function has an invalid Google-style docstring and `error_on_invalid_docstring` is True. - ValueError: If a Runnable is provided that does not have an object schema. + ValueError: If a `Runnable` is provided that does not have an object schema. Returns: The tool. - Requires: - - Function must be of type (str) -> str - - Function must have a docstring - Examples: ```python @tool @@ -155,8 +166,6 @@ def tool( return "partial json of results", {"full": "object of results"} ``` - !!! version-added "Added in version 0.2.14" - Parse Google-style docstrings: ```python @@ -197,7 +206,7 @@ def tool( Note that parsing by default will raise `ValueError` if the docstring is considered invalid. A docstring is considered invalid if it contains arguments not in the function signature, or is unable to be parsed into - a summary and "Args:" blocks. Examples below: + a summary and `"Args:"` blocks. Examples below: ```python # No args section @@ -397,10 +406,10 @@ def convert_runnable_to_tool( Args: runnable: The runnable to convert. - args_schema: The schema for the tool's input arguments. Defaults to `None`. - name: The name of the tool. Defaults to `None`. - description: The description of the tool. Defaults to `None`. - arg_types: The types of the arguments. Defaults to `None`. + args_schema: The schema for the tool's input arguments. + name: The name of the tool. + description: The description of the tool. + arg_types: The types of the arguments. Returns: The tool. diff --git a/libs/core/langchain_core/tools/retriever.py b/libs/core/langchain_core/tools/retriever.py index fff2fe3fe87..5677e97bd74 100644 --- a/libs/core/langchain_core/tools/retriever.py +++ b/libs/core/langchain_core/tools/retriever.py @@ -81,13 +81,14 @@ def create_retriever_tool( so should be unique and somewhat descriptive. description: The description for the tool. This will be passed to the language model, so should be descriptive. - document_prompt: The prompt to use for the document. Defaults to `None`. - document_separator: The separator to use between documents. Defaults to "\n\n". - response_format: The tool response format. If "content" then the output of - the tool is interpreted as the contents of a ToolMessage. If - "content_and_artifact" then the output is expected to be a two-tuple - corresponding to the (content, artifact) of a ToolMessage (artifact - being a list of documents in this case). Defaults to "content". + document_prompt: The prompt to use for the document. + document_separator: The separator to use between documents. + response_format: The tool response format. + + If `"content"` then the output of the tool is interpreted as the contents of + a `ToolMessage`. If `"content_and_artifact"` then the output is expected to + be a two-tuple corresponding to the `(content, artifact)` of a `ToolMessage` + (artifact being a list of documents in this case). Returns: Tool class to pass to an agent. 
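Since `create_retriever_tool` now documents the `content_and_artifact` response format above, a small usage sketch may help. It is illustrative only: `InMemoryVectorStore`, `DeterministicFakeEmbedding`, and `Document` are assumptions about what `langchain-core` provides and are not part of this diff; the `create_retriever_tool` parameters follow the docstring in the hunk above.

```python
# Illustrative only: wire create_retriever_tool to a tiny in-memory store so
# the example has no external dependencies. Names outside the hunk above are
# assumptions about langchain-core, not changes made by this PR.
from langchain_core.documents import Document
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_core.tools.retriever import create_retriever_tool
from langchain_core.vectorstores import InMemoryVectorStore

store = InMemoryVectorStore(DeterministicFakeEmbedding(size=8))
store.add_documents([Document(page_content="langchain-core ships the base abstractions.")])

docs_tool = create_retriever_tool(
    store.as_retriever(),
    name="search_docs",
    description="Search the internal documentation.",
    response_format="content_and_artifact",  # artifact = the retrieved Documents
)

# Invoking with a ToolCall yields a ToolMessage: the joined page contents become
# the message content, while the raw Documents ride along as the artifact.
message = docs_tool.invoke(
    {"type": "tool_call", "name": "search_docs", "args": {"query": "core"}, "id": "call-1"}
)
print(message.content)
print(message.artifact)
```

With the default `"content"` format, the resulting `ToolMessage` would carry only the joined document text and no artifact.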
diff --git a/libs/core/langchain_core/tools/simple.py b/libs/core/langchain_core/tools/simple.py index 249fc41fefd..68c70e61258 100644 --- a/libs/core/langchain_core/tools/simple.py +++ b/libs/core/langchain_core/tools/simple.py @@ -69,7 +69,7 @@ class Tool(BaseTool): def _to_args_and_kwargs( self, tool_input: str | dict, tool_call_id: str | None ) -> tuple[tuple, dict]: - """Convert tool input to pydantic model. + """Convert tool input to Pydantic model. Args: tool_input: The input to the tool. @@ -79,8 +79,7 @@ class Tool(BaseTool): ToolException: If the tool input is invalid. Returns: - the pydantic model args and kwargs. - + The Pydantic model args and kwargs. """ args, kwargs = super()._to_args_and_kwargs(tool_input, tool_call_id) # For backwards compatibility. The tool must be run with a single input @@ -177,9 +176,9 @@ class Tool(BaseTool): func: The function to create the tool from. name: The name of the tool. description: The description of the tool. - return_direct: Whether to return the output directly. Defaults to `False`. - args_schema: The schema of the tool's input arguments. Defaults to `None`. - coroutine: The asynchronous version of the function. Defaults to `None`. + return_direct: Whether to return the output directly. + args_schema: The schema of the tool's input arguments. + coroutine: The asynchronous version of the function. **kwargs: Additional arguments to pass to the tool. Returns: diff --git a/libs/core/langchain_core/tools/structured.py b/libs/core/langchain_core/tools/structured.py index cc978446f3b..43e981570a0 100644 --- a/libs/core/langchain_core/tools/structured.py +++ b/libs/core/langchain_core/tools/structured.py @@ -149,21 +149,18 @@ class StructuredTool(BaseTool): description: The description of the tool. Defaults to the function docstring. return_direct: Whether to return the result directly or as a callback. - Defaults to `False`. - args_schema: The schema of the tool's input arguments. Defaults to `None`. + args_schema: The schema of the tool's input arguments. infer_schema: Whether to infer the schema from the function's signature. - Defaults to `True`. - response_format: The tool response format. If "content" then the output of - the tool is interpreted as the contents of a ToolMessage. If - "content_and_artifact" then the output is expected to be a two-tuple - corresponding to the (content, artifact) of a ToolMessage. - Defaults to "content". - parse_docstring: if `infer_schema` and `parse_docstring`, will attempt + response_format: The tool response format. + + If `"content"` then the output of the tool is interpreted as the + contents of a `ToolMessage`. If `"content_and_artifact"` then the output + is expected to be a two-tuple corresponding to the `(content, artifact)` + of a `ToolMessage`. + parse_docstring: If `infer_schema` and `parse_docstring`, will attempt to parse parameter descriptions from Google Style function docstrings. - Defaults to `False`. error_on_invalid_docstring: if `parse_docstring` is provided, configure - whether to raise ValueError on invalid Google Style docstrings. - Defaults to `False`. + whether to raise `ValueError` on invalid Google Style docstrings. 
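As a quick sketch of `Tool.from_function` / `StructuredTool.from_function` with the `coroutine` argument described in the hunks above (function names are invented; the input schema is inferred from the type hints):

```python
import asyncio

from langchain_core.tools import StructuredTool


def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b


async def amultiply(a: int, b: int) -> int:
    """Asynchronously multiply two integers."""
    return a * b


# Pair a sync implementation with its async counterpart in a single tool.
calculator = StructuredTool.from_function(func=multiply, coroutine=amultiply)

print(calculator.invoke({"a": 6, "b": 7}))                # 42
print(asyncio.run(calculator.ainvoke({"a": 6, "b": 7})))  # 42
```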
**kwargs: Additional arguments to pass to the tool Returns: diff --git a/libs/core/langchain_core/tracers/base.py b/libs/core/langchain_core/tracers/base.py index 1ff9c9ed0cb..01bc9da0aa8 100644 --- a/libs/core/langchain_core/tracers/base.py +++ b/libs/core/langchain_core/tracers/base.py @@ -67,9 +67,9 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): serialized: The serialized model. messages: The messages to start the chat with. run_id: The run ID. - tags: The tags for the run. Defaults to `None`. - parent_run_id: The parent run ID. Defaults to `None`. - metadata: The metadata for the run. Defaults to `None`. + tags: The tags for the run. + parent_run_id: The parent run ID. + metadata: The metadata for the run. name: The name of the run. **kwargs: Additional arguments. @@ -108,9 +108,9 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): serialized: The serialized model. prompts: The prompts to start the LLM with. run_id: The run ID. - tags: The tags for the run. Defaults to `None`. - parent_run_id: The parent run ID. Defaults to `None`. - metadata: The metadata for the run. Defaults to `None`. + tags: The tags for the run. + parent_run_id: The parent run ID. + metadata: The metadata for the run. name: The name of the run. **kwargs: Additional arguments. @@ -145,9 +145,9 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): Args: token: The token. - chunk: The chunk. Defaults to `None`. + chunk: The chunk. run_id: The run ID. - parent_run_id: The parent run ID. Defaults to `None`. + parent_run_id: The parent run ID. **kwargs: Additional arguments. Returns: @@ -255,10 +255,10 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): serialized: The serialized chain. inputs: The inputs for the chain. run_id: The run ID. - tags: The tags for the run. Defaults to `None`. - parent_run_id: The parent run ID. Defaults to `None`. - metadata: The metadata for the run. Defaults to `None`. - run_type: The type of the run. Defaults to `None`. + tags: The tags for the run. + parent_run_id: The parent run ID. + metadata: The metadata for the run. + run_type: The type of the run. name: The name of the run. **kwargs: Additional arguments. @@ -294,7 +294,7 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): Args: outputs: The outputs for the chain. run_id: The run ID. - inputs: The inputs for the chain. Defaults to `None`. + inputs: The inputs for the chain. **kwargs: Additional arguments. Returns: @@ -322,7 +322,7 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): Args: error: The error. - inputs: The inputs for the chain. Defaults to `None`. + inputs: The inputs for the chain. run_id: The run ID. **kwargs: Additional arguments. @@ -357,9 +357,9 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): serialized: The serialized tool. input_str: The input string. run_id: The run ID. - tags: The tags for the run. Defaults to `None`. - parent_run_id: The parent run ID. Defaults to `None`. - metadata: The metadata for the run. Defaults to `None`. + tags: The tags for the run. + parent_run_id: The parent run ID. + metadata: The metadata for the run. name: The name of the run. inputs: The inputs for the tool. **kwargs: Additional arguments. @@ -446,9 +446,9 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC): serialized: The serialized retriever. query: The query. run_id: The run ID. - parent_run_id: The parent run ID. Defaults to `None`. - tags: The tags for the run. Defaults to `None`. - metadata: The metadata for the run. Defaults to `None`. 
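The `BaseTracer` hooks documented in the hunks above can be exercised with a small custom tracer; this is only a sketch, and `LoggingTracer` is a hypothetical name, not part of the library:

```python
from langchain_core.tracers.base import BaseTracer
from langchain_core.tracers.schemas import Run


class LoggingTracer(BaseTracer):
    """Hypothetical tracer that prints a one-line summary per finished root run."""

    def _persist_run(self, run: Run) -> None:
        # BaseTracer invokes this when a root run ends.
        print(f"[{run.run_type}] {run.name} ({run.id})")


# Usage sketch: pass it as a callback, e.g.
#     some_runnable.invoke(inputs, config={"callbacks": [LoggingTracer()]})
```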
+ parent_run_id: The parent run ID. + tags: The tags for the run. + metadata: The metadata for the run. name: The name of the run. **kwargs: Additional arguments. diff --git a/libs/core/langchain_core/tracers/context.py b/libs/core/langchain_core/tracers/context.py index 2dc2043a920..982054b1322 100644 --- a/libs/core/langchain_core/tracers/context.py +++ b/libs/core/langchain_core/tracers/context.py @@ -48,9 +48,9 @@ def tracing_v2_enabled( Args: project_name: The name of the project. Defaults to `'default'`. - example_id: The ID of the example. Defaults to `None`. - tags: The tags to add to the run. Defaults to `None`. - client: The client of the langsmith. Defaults to `None`. + example_id: The ID of the example. + tags: The tags to add to the run. + client: The client of the langsmith. Yields: The LangChain tracer. diff --git a/libs/core/langchain_core/tracers/event_stream.py b/libs/core/langchain_core/tracers/event_stream.py index 67a2f81ce6c..acccd979d1c 100644 --- a/libs/core/langchain_core/tracers/event_stream.py +++ b/libs/core/langchain_core/tracers/event_stream.py @@ -128,7 +128,10 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand exclude_tags=exclude_tags, ) - loop = asyncio.get_event_loop() + try: + loop = asyncio.get_event_loop() + except RuntimeError: + loop = asyncio.new_event_loop() memory_stream = _MemoryStream[StreamEvent](loop) self.send_stream = memory_stream.get_send_stream() self.receive_stream = memory_stream.get_receive_stream() diff --git a/libs/core/langchain_core/tracers/langchain.py b/libs/core/langchain_core/tracers/langchain.py index d1928622cb5..15186007d6a 100644 --- a/libs/core/langchain_core/tracers/langchain.py +++ b/libs/core/langchain_core/tracers/langchain.py @@ -134,10 +134,10 @@ class LangChainTracer(BaseTracer): serialized: The serialized model. messages: The messages. run_id: The run ID. - tags: The tags. Defaults to `None`. - parent_run_id: The parent run ID. Defaults to `None`. - metadata: The metadata. Defaults to `None`. - name: The name. Defaults to `None`. + tags: The tags. + parent_run_id: The parent run ID. + metadata: The metadata. + name: The name. **kwargs: Additional keyword arguments. Returns: diff --git a/libs/core/langchain_core/tracers/log_stream.py b/libs/core/langchain_core/tracers/log_stream.py index 65a6a1baf7c..345d5176735 100644 --- a/libs/core/langchain_core/tracers/log_stream.py +++ b/libs/core/langchain_core/tracers/log_stream.py @@ -96,10 +96,10 @@ class RunLogPatch: """Patch to the run log.""" ops: list[dict[str, Any]] - """List of jsonpatch operations, which describe how to create the run state + """List of JSONPatch operations, which describe how to create the run state from an empty dict. This is the minimal representation of the log, designed to be serialized as JSON and sent over the wire to reconstruct the log on the other - side. Reconstruction of the state can be done with any jsonpatch-compliant library, + side. Reconstruction of the state can be done with any JSONPatch-compliant library, see https://jsonpatch.com for more information.""" def __init__(self, *ops: dict[str, Any]) -> None: @@ -190,7 +190,7 @@ class RunLog(RunLogPatch): other: The other `RunLog` to compare to. Returns: - True if the `RunLog`s are equal, False otherwise. + `True` if the `RunLog`s are equal, `False` otherwise. 
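The `RunLogPatch` docstring above notes that run state can be rebuilt with any JSONPatch-compliant library. A minimal sketch using the third-party `jsonpatch` package; the ops shown are invented for the example but follow the same shape as those emitted by `astream_log`:

```python
import jsonpatch  # pip install jsonpatch

# Hypothetical stream of RunLogPatch-style ops.
ops_stream = [
    [{"op": "replace", "path": "", "value": {"logs": {}, "streamed_output": []}}],
    [{"op": "add", "path": "/streamed_output/-", "value": "Hello"}],
    [{"op": "add", "path": "/streamed_output/-", "value": " world"}],
]

state: dict = {}
for ops in ops_stream:
    # Each batch of ops patches the previous state, rebuilding the run log.
    state = jsonpatch.apply_patch(state, ops)

print(state["streamed_output"])  # ['Hello', ' world']
```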
""" # First compare that the state is the same if not isinstance(other, RunLog): @@ -264,7 +264,10 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler): self.exclude_types = exclude_types self.exclude_tags = exclude_tags - loop = asyncio.get_event_loop() + try: + loop = asyncio.get_event_loop() + except RuntimeError: + loop = asyncio.new_event_loop() memory_stream = _MemoryStream[RunLogPatch](loop) self.lock = threading.Lock() self.send_stream = memory_stream.get_send_stream() @@ -288,7 +291,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler): *ops: The operations to send to the stream. Returns: - True if the patch was sent successfully, False if the stream is closed. + `True` if the patch was sent successfully, False if the stream is closed. """ # We will likely want to wrap this in try / except at some point # to handle exceptions that might arise at run time. @@ -368,7 +371,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler): run: The Run to check. Returns: - True if the run should be included, False otherwise. + `True` if the run should be included, `False` otherwise. """ if run.id == self.root_id: return False diff --git a/libs/core/langchain_core/tracers/memory_stream.py b/libs/core/langchain_core/tracers/memory_stream.py index a59e47821d4..0a9facf17af 100644 --- a/libs/core/langchain_core/tracers/memory_stream.py +++ b/libs/core/langchain_core/tracers/memory_stream.py @@ -5,7 +5,7 @@ channel. The writer and reader can be in the same event loop or in different eve loops. When they're in different event loops, they will also be in different threads. -This is useful in situations when there's a mix of synchronous and asynchronous +Useful in situations when there's a mix of synchronous and asynchronous used in the code. """ diff --git a/libs/core/langchain_core/tracers/root_listeners.py b/libs/core/langchain_core/tracers/root_listeners.py index 043805c16d4..923cd1c16f6 100644 --- a/libs/core/langchain_core/tracers/root_listeners.py +++ b/libs/core/langchain_core/tracers/root_listeners.py @@ -24,7 +24,7 @@ class RootListenersTracer(BaseTracer): """Tracer that calls listeners on run start, end, and error.""" log_missing_parent = False - """Whether to log a warning if the parent is missing. Default is False.""" + """Whether to log a warning if the parent is missing.""" def __init__( self, @@ -79,7 +79,7 @@ class AsyncRootListenersTracer(AsyncBaseTracer): """Async Tracer that calls listeners on run start, end, and error.""" log_missing_parent = False - """Whether to log a warning if the parent is missing. Default is False.""" + """Whether to log a warning if the parent is missing.""" def __init__( self, diff --git a/libs/core/langchain_core/tracers/schemas.py b/libs/core/langchain_core/tracers/schemas.py index f06ad0a80d4..67a37035b4d 100644 --- a/libs/core/langchain_core/tracers/schemas.py +++ b/libs/core/langchain_core/tracers/schemas.py @@ -2,140 +2,13 @@ from __future__ import annotations -import warnings -from datetime import datetime, timezone -from typing import Any -from uuid import UUID - from langsmith import RunTree -from langsmith.schemas import RunTypeEnum as RunTypeEnumDep -from pydantic import PydanticDeprecationWarning -from pydantic.v1 import BaseModel as BaseModelV1 -from pydantic.v1 import Field as FieldV1 - -from langchain_core._api import deprecated - - -@deprecated("0.1.0", alternative="Use string instead.", removal="1.0") -def RunTypeEnum() -> type[RunTypeEnumDep]: # noqa: N802 - """`RunTypeEnum`. 
- - Returns: - The `RunTypeEnum` class. - """ - warnings.warn( - "RunTypeEnum is deprecated. Please directly use a string instead" - " (e.g. 'llm', 'chain', 'tool').", - DeprecationWarning, - stacklevel=2, - ) - return RunTypeEnumDep - - -@deprecated("0.1.0", removal="1.0") -class TracerSessionV1Base(BaseModelV1): - """Base class for TracerSessionV1.""" - - start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc)) - name: str | None = None - extra: dict[str, Any] | None = None - - -@deprecated("0.1.0", removal="1.0") -class TracerSessionV1Create(TracerSessionV1Base): - """Create class for TracerSessionV1.""" - - -@deprecated("0.1.0", removal="1.0") -class TracerSessionV1(TracerSessionV1Base): - """TracerSessionV1 schema.""" - - id: int - - -@deprecated("0.1.0", removal="1.0") -class TracerSessionBase(TracerSessionV1Base): - """Base class for TracerSession.""" - - tenant_id: UUID - - -@deprecated("0.1.0", removal="1.0") -class TracerSession(TracerSessionBase): - """TracerSessionV1 schema for the V2 API.""" - - id: UUID - - -@deprecated("0.1.0", alternative="Run", removal="1.0") -class BaseRun(BaseModelV1): - """Base class for Run.""" - - uuid: str - parent_uuid: str | None = None - start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc)) - end_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc)) - extra: dict[str, Any] | None = None - execution_order: int - child_execution_order: int - serialized: dict[str, Any] - session_id: int - error: str | None = None - - -@deprecated("0.1.0", alternative="Run", removal="1.0") -class LLMRun(BaseRun): - """Class for LLMRun.""" - - prompts: list[str] - - -@deprecated("0.1.0", alternative="Run", removal="1.0") -class ChainRun(BaseRun): - """Class for ChainRun.""" - - inputs: dict[str, Any] - outputs: dict[str, Any] | None = None - child_llm_runs: list[LLMRun] = FieldV1(default_factory=list) - child_chain_runs: list[ChainRun] = FieldV1(default_factory=list) - child_tool_runs: list[ToolRun] = FieldV1(default_factory=list) - - -@deprecated("0.1.0", alternative="Run", removal="1.0") -class ToolRun(BaseRun): - """Class for ToolRun.""" - - tool_input: str - output: str | None = None - action: str - child_llm_runs: list[LLMRun] = FieldV1(default_factory=list) - child_chain_runs: list[ChainRun] = FieldV1(default_factory=list) - child_tool_runs: list[ToolRun] = FieldV1(default_factory=list) - # Begin V2 API Schemas Run = RunTree # For backwards compatibility -# TODO: Update once langsmith moves to Pydantic V2 and we can swap Run.model_rebuild -# for Run.update_forward_refs -with warnings.catch_warnings(): - warnings.simplefilter("ignore", category=PydanticDeprecationWarning) - - ChainRun.update_forward_refs() - ToolRun.update_forward_refs() - __all__ = [ - "BaseRun", - "ChainRun", - "LLMRun", "Run", - "RunTypeEnum", - "ToolRun", - "TracerSession", - "TracerSessionBase", - "TracerSessionV1", - "TracerSessionV1Base", - "TracerSessionV1Create", ] diff --git a/libs/core/langchain_core/tracers/stdout.py b/libs/core/langchain_core/tracers/stdout.py index 72e6cee19a6..119b9127fd6 100644 --- a/libs/core/langchain_core/tracers/stdout.py +++ b/libs/core/langchain_core/tracers/stdout.py @@ -49,8 +49,7 @@ class FunctionCallbackHandler(BaseTracer): """Tracer that calls a function with a single str parameter.""" name: str = "function_callback_handler" - """The name of the tracer. This is used to identify the tracer in the logs. - Default is "function_callback_handler".""" + """The name of the tracer. 
This is used to identify the tracer in the logs.""" def __init__(self, function: Callable[[str], None], **kwargs: Any) -> None: """Create a FunctionCallbackHandler. diff --git a/libs/core/langchain_core/utils/__init__.py b/libs/core/langchain_core/utils/__init__.py index e16f3c11583..04216583702 100644 --- a/libs/core/langchain_core/utils/__init__.py +++ b/libs/core/langchain_core/utils/__init__.py @@ -1,4 +1,4 @@ -"""**Utility functions** for LangChain. +"""Utility functions for LangChain. These functions do not depend on any other LangChain module. """ diff --git a/libs/core/langchain_core/utils/aiter.py b/libs/core/langchain_core/utils/aiter.py index e910cee43c6..b196b43aba8 100644 --- a/libs/core/langchain_core/utils/aiter.py +++ b/libs/core/langchain_core/utils/aiter.py @@ -201,9 +201,9 @@ class Tee(Generic[T]): Args: iterable: The iterable to split. - n: The number of iterators to create. Defaults to 2. + n: The number of iterators to create. lock: The lock to synchronise access to the shared buffers. - Defaults to `None`. + """ self._iterator = iterable.__aiter__() # before 3.10 aiter() doesn't exist self._buffers: list[deque[T]] = [deque() for _ in range(n)] diff --git a/libs/core/langchain_core/utils/env.py b/libs/core/langchain_core/utils/env.py index 5d9abe916e5..f19928e8761 100644 --- a/libs/core/langchain_core/utils/env.py +++ b/libs/core/langchain_core/utils/env.py @@ -13,7 +13,7 @@ def env_var_is_set(env_var: str) -> bool: env_var: The name of the environment variable. Returns: - True if the environment variable is set, False otherwise. + `True` if the environment variable is set, `False` otherwise. """ return env_var in os.environ and os.environ[env_var] not in { "", @@ -38,7 +38,7 @@ def get_from_dict_or_env( env_key: The environment variable to look up if the key is not in the dictionary. default: The default value to return if the key is not in the dictionary - or the environment. Defaults to `None`. + or the environment. Returns: The dict value or the environment variable value. @@ -64,7 +64,7 @@ def get_from_env(key: str, env_key: str, default: str | None = None) -> str: env_key: The environment variable to look up if the key is not in the dictionary. default: The default value to return if the key is not in the dictionary - or the environment. Defaults to `None`. + or the environment. Returns: The value of the key. diff --git a/libs/core/langchain_core/utils/function_calling.py b/libs/core/langchain_core/utils/function_calling.py index 5ba5efa59e7..f96b3ee1a9b 100644 --- a/libs/core/langchain_core/utils/function_calling.py +++ b/libs/core/langchain_core/utils/function_calling.py @@ -27,7 +27,7 @@ from pydantic.v1 import create_model as create_model_v1 from typing_extensions import TypedDict, is_typeddict import langchain_core -from langchain_core._api import beta, deprecated +from langchain_core._api import beta from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage from langchain_core.utils.json_schema import dereference_refs from langchain_core.utils.pydantic import is_basemodel_subclass @@ -114,7 +114,7 @@ def _convert_json_schema_to_openai_function( used. description: The description of the function. If not provided, the description of the schema will be used. - rm_titles: Whether to remove titles from the schema. Defaults to `True`. + rm_titles: Whether to remove titles from the schema. Returns: The function description. @@ -148,7 +148,7 @@ def _convert_pydantic_to_openai_function( used. description: The description of the function. 
If not provided, the description of the schema will be used. - rm_titles: Whether to remove titles from the schema. Defaults to `True`. + rm_titles: Whether to remove titles from the schema. Raises: TypeError: If the model is not a Pydantic model. @@ -168,42 +168,6 @@ def _convert_pydantic_to_openai_function( ) -convert_pydantic_to_openai_function = deprecated( - "0.1.16", - alternative="langchain_core.utils.function_calling.convert_to_openai_function()", - removal="1.0", -)(_convert_pydantic_to_openai_function) - - -@deprecated( - "0.1.16", - alternative="langchain_core.utils.function_calling.convert_to_openai_tool()", - removal="1.0", -) -def convert_pydantic_to_openai_tool( - model: type[BaseModel], - *, - name: str | None = None, - description: str | None = None, -) -> ToolDescription: - """Converts a Pydantic model to a function description for the OpenAI API. - - Args: - model: The Pydantic model to convert. - name: The name of the function. If not provided, the title of the schema will be - used. - description: The description of the function. If not provided, the description - of the schema will be used. - - Returns: - The tool description. - """ - function = _convert_pydantic_to_openai_function( - model, name=name, description=description - ) - return {"type": "function", "function": function} - - def _get_python_function_name(function: Callable) -> str: """Get the name of a Python function.""" return function.__name__ @@ -240,13 +204,6 @@ def _convert_python_function_to_openai_function( ) -convert_python_function_to_openai_function = deprecated( - "0.1.16", - alternative="langchain_core.utils.function_calling.convert_to_openai_function()", - removal="1.0", -)(_convert_python_function_to_openai_function) - - def _convert_typed_dict_to_openai_function(typed_dict: type) -> FunctionDescription: visited: dict = {} @@ -368,31 +325,6 @@ def _format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription: } -format_tool_to_openai_function = deprecated( - "0.1.16", - alternative="langchain_core.utils.function_calling.convert_to_openai_function()", - removal="1.0", -)(_format_tool_to_openai_function) - - -@deprecated( - "0.1.16", - alternative="langchain_core.utils.function_calling.convert_to_openai_tool()", - removal="1.0", -) -def format_tool_to_openai_tool(tool: BaseTool) -> ToolDescription: - """Format tool into the OpenAI function API. - - Args: - tool: The tool to format. - - Returns: - The tool description. - """ - function = _format_tool_to_openai_function(tool) - return {"type": "function", "function": function} - - def convert_to_openai_function( function: dict[str, Any] | type | Callable | BaseTool, *, @@ -402,11 +334,11 @@ def convert_to_openai_function( Args: function: - A dictionary, Pydantic BaseModel class, TypedDict class, a LangChain - Tool object, or a Python function. If a dictionary is passed in, it is + A dictionary, Pydantic `BaseModel` class, `TypedDict` class, a LangChain + `Tool` object, or a Python function. If a dictionary is passed in, it is assumed to already be a valid OpenAI function, a JSON schema with - top-level 'title' key specified, an Anthropic format - tool, or an Amazon Bedrock Converse format tool. + top-level `title` key specified, an Anthropic format tool, or an Amazon + Bedrock Converse format tool. strict: If `True`, model output is guaranteed to exactly match the JSON Schema provided in the function definition. 
If `None`, `strict` argument will not @@ -419,17 +351,8 @@ def convert_to_openai_function( Raises: ValueError: If function is not in a supported format. - !!! warning "Behavior changed in 0.2.29" - `strict` arg added. - - !!! warning "Behavior changed in 0.3.13" - Support for Anthropic format tools added. - - !!! warning "Behavior changed in 0.3.14" - Support for Amazon Bedrock Converse format tools added. - - !!! warning "Behavior changed in 0.3.16" - 'description' and 'parameters' keys are now optional. Only 'name' is + !!! warning "Behavior changed in `langchain-core` 0.3.16" + `description` and `parameters` keys are now optional. Only `name` is required and guaranteed to be part of the output. """ # an Anthropic format tool @@ -489,7 +412,7 @@ def convert_to_openai_function( if strict is not None: if "strict" in oai_function and oai_function["strict"] != strict: msg = ( - f"Tool/function already has a 'strict' key wth value " + f"Tool/function already has a 'strict' key with value " f"{oai_function['strict']} which is different from the explicit " f"`strict` arg received {strict=}." ) @@ -502,6 +425,14 @@ def convert_to_openai_function( oai_function["parameters"] = _recursive_set_additional_properties_false( oai_function["parameters"] ) + # All fields must be `required` + parameters = oai_function.get("parameters") + if isinstance(parameters, dict): + fields = parameters.get("properties") + if isinstance(fields, dict) and fields: + parameters = dict(parameters) + parameters["required"] = list(fields.keys()) + oai_function["parameters"] = parameters return oai_function @@ -527,16 +458,14 @@ def convert_to_openai_tool( ) -> dict[str, Any]: """Convert a tool-like object to an OpenAI tool schema. - OpenAI tool schema reference: - https://platform.openai.com/docs/api-reference/chat/create#chat-create-tools + [OpenAI tool schema reference](https://platform.openai.com/docs/api-reference/chat/create#chat-create-tools) Args: tool: - Either a dictionary, a pydantic.BaseModel class, Python function, or - BaseTool. If a dictionary is passed in, it is - assumed to already be a valid OpenAI function, a JSON schema with - top-level 'title' key specified, an Anthropic format - tool, or an Amazon Bedrock Converse format tool. + Either a dictionary, a `pydantic.BaseModel` class, Python function, or + `BaseTool`. If a dictionary is passed in, it is assumed to already be a + valid OpenAI function, a JSON schema with top-level `title` key specified, + an Anthropic format tool, or an Amazon Bedrock Converse format tool. strict: If `True`, model output is guaranteed to exactly match the JSON Schema provided in the function definition. If `None`, `strict` argument will not @@ -546,28 +475,16 @@ def convert_to_openai_tool( A dict version of the passed in tool which is compatible with the OpenAI tool-calling API. - !!! warning "Behavior changed in 0.2.29" - `strict` arg added. - - !!! warning "Behavior changed in 0.3.13" - Support for Anthropic format tools added. - - !!! warning "Behavior changed in 0.3.14" - Support for Amazon Bedrock Converse format tools added. - - !!! warning "Behavior changed in 0.3.16" - 'description' and 'parameters' keys are now optional. Only 'name' is + !!! warning "Behavior changed in `langchain-core` 0.3.16" + `description` and `parameters` keys are now optional. Only `name` is required and guaranteed to be part of the output. - !!! warning "Behavior changed in 0.3.44" + !!! warning "Behavior changed in `langchain-core` 0.3.44" Return OpenAI Responses API-style tools unchanged. 
This includes - any dict with "type" in "file_search", "function", "computer_use_preview", - "web_search_preview". + any dict with `"type"` in `"file_search"`, `"function"`, + `"computer_use_preview"`, `"web_search_preview"`. - !!! warning "Behavior changed in 0.3.61" - Added support for OpenAI's built-in code interpreter and remote MCP tools. - - !!! warning "Behavior changed in 0.3.63" + !!! warning "Behavior changed in `langchain-core` 0.3.63" Added support for OpenAI's image generation built-in tool. """ # Import locally to prevent circular import @@ -665,7 +582,7 @@ def tool_example_to_messages( tool_calls: Tool calls represented as Pydantic BaseModels tool_outputs: Tool call outputs. Does not need to be provided. If not provided, a placeholder value - will be inserted. Defaults to `None`. + will be inserted. ai_response: If provided, content for a final `AIMessage`. Returns: @@ -713,7 +630,7 @@ def tool_example_to_messages( "type": "function", "function": { # The name of the function right now corresponds to the name - # of the pydantic model. This is implicit in the API right now, + # of the Pydantic model. This is implicit in the API right now, # and will be improved over time. "name": tool_call.__class__.__name__, "arguments": tool_call.model_dump_json(), @@ -736,6 +653,9 @@ def tool_example_to_messages( return messages +_MIN_DOCSTRING_BLOCKS = 2 + + def _parse_google_docstring( docstring: str | None, args: list[str], @@ -754,7 +674,7 @@ def _parse_google_docstring( arg for arg in args if arg not in {"run_manager", "callbacks", "return"} } if filtered_annotations and ( - len(docstring_blocks) < 2 + len(docstring_blocks) < _MIN_DOCSTRING_BLOCKS or not any(block.startswith("Args:") for block in docstring_blocks[1:]) ): msg = "Found invalid Google-Style docstring." diff --git a/libs/core/langchain_core/utils/input.py b/libs/core/langchain_core/utils/input.py index bedb58cb625..d8946230bf3 100644 --- a/libs/core/langchain_core/utils/input.py +++ b/libs/core/langchain_core/utils/input.py @@ -26,6 +26,9 @@ def get_color_mapping( colors = list(_TEXT_COLOR_MAPPING.keys()) if excluded_colors is not None: colors = [c for c in colors if c not in excluded_colors] + if not colors: + msg = "No colors available after applying exclusions." + raise ValueError(msg) return {item: colors[i % len(colors)] for i, item in enumerate(items)} @@ -65,9 +68,9 @@ def print_text( Args: text: The text to print. - color: The color to use. Defaults to `None`. - end: The end character to use. Defaults to "". - file: The file to write to. Defaults to `None`. + color: The color to use. + end: The end character to use. + file: The file to write to. """ text_to_print = get_colored_text(text, color) if color else text print(text_to_print, end=end, file=file) diff --git a/libs/core/langchain_core/utils/interactive_env.py b/libs/core/langchain_core/utils/interactive_env.py index 305b8edc146..f86fe0763b2 100644 --- a/libs/core/langchain_core/utils/interactive_env.py +++ b/libs/core/langchain_core/utils/interactive_env.py @@ -7,6 +7,6 @@ def is_interactive_env() -> bool: """Determine if running within IPython or Jupyter. Returns: - True if running in an interactive environment, False otherwise. + True if running in an interactive environment, `False` otherwise. 
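For reference, a minimal sketch of `convert_to_openai_tool` on a plain Python function, including the `strict` flag discussed above (the example function is invented):

```python
from langchain_core.utils.function_calling import convert_to_openai_tool


def get_weather(city: str, unit: str = "celsius") -> str:
    """Get the current weather for a city.

    Args:
        city: Name of the city.
        unit: Temperature unit, either "celsius" or "fahrenheit".
    """
    return f"Sunny in {city} (reported in {unit})"


# With strict=True the emitted JSON schema is tightened for structured tool
# calling (additionalProperties disabled and every field marked required).
oai_tool = convert_to_openai_tool(get_weather, strict=True)
print(oai_tool["type"])              # "function"
print(oai_tool["function"]["name"])  # "get_weather"
```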
""" return hasattr(sys, "ps2") diff --git a/libs/core/langchain_core/utils/iter.py b/libs/core/langchain_core/utils/iter.py index 2334d9063f6..a4f9b0e1ade 100644 --- a/libs/core/langchain_core/utils/iter.py +++ b/libs/core/langchain_core/utils/iter.py @@ -137,9 +137,9 @@ class Tee(Generic[T]): Args: iterable: The iterable to split. - n: The number of iterators to create. Defaults to 2. + n: The number of iterators to create. lock: The lock to synchronise access to the shared buffers. - Defaults to `None`. + """ self._iterator = iter(iterable) self._buffers: list[deque[T]] = [deque() for _ in range(n)] diff --git a/libs/core/langchain_core/utils/json.py b/libs/core/langchain_core/utils/json.py index b1e3879a476..9c00b11f94f 100644 --- a/libs/core/langchain_core/utils/json.py +++ b/libs/core/langchain_core/utils/json.py @@ -51,7 +51,7 @@ def parse_partial_json(s: str, *, strict: bool = False) -> Any: Args: s: The JSON string to parse. - strict: Whether to use strict parsing. Defaults to `False`. + strict: Whether to use strict parsing. Returns: The parsed JSON object as a Python dictionary. diff --git a/libs/core/langchain_core/utils/json_schema.py b/libs/core/langchain_core/utils/json_schema.py index 6118bd95dc2..68b04beb8c8 100644 --- a/libs/core/langchain_core/utils/json_schema.py +++ b/libs/core/langchain_core/utils/json_schema.py @@ -226,7 +226,7 @@ def dereference_refs( ... } >>> result = dereference_refs(schema) # Won't cause infinite recursion - Note: + !!! note - Circular references are handled gracefully by breaking cycles - Mixed $ref objects (with both $ref and other properties) are supported - Additional properties in mixed $refs override resolved properties diff --git a/libs/core/langchain_core/utils/pydantic.py b/libs/core/langchain_core/utils/pydantic.py index db7bf460ffb..fd3413e715b 100644 --- a/libs/core/langchain_core/utils/pydantic.py +++ b/libs/core/langchain_core/utils/pydantic.py @@ -65,8 +65,8 @@ def get_pydantic_major_version() -> int: PYDANTIC_MAJOR_VERSION = PYDANTIC_VERSION.major PYDANTIC_MINOR_VERSION = PYDANTIC_VERSION.minor -IS_PYDANTIC_V1 = PYDANTIC_VERSION.major == 1 -IS_PYDANTIC_V2 = PYDANTIC_VERSION.major == 2 +IS_PYDANTIC_V1 = False +IS_PYDANTIC_V2 = True PydanticBaseModel = BaseModel TypeBaseModel = type[BaseModel] @@ -78,7 +78,7 @@ def is_pydantic_v1_subclass(cls: type) -> bool: """Check if the given class is Pydantic v1-like. Returns: - True if the given class is a subclass of Pydantic `BaseModel` 1.x. + `True` if the given class is a subclass of Pydantic `BaseModel` 1.x. """ return issubclass(cls, BaseModelV1) @@ -87,7 +87,7 @@ def is_pydantic_v2_subclass(cls: type) -> bool: """Check if the given class is Pydantic v2-like. Returns: - True if the given class is a subclass of Pydantic BaseModel 2.x. + `True` if the given class is a subclass of Pydantic BaseModel 2.x. """ return issubclass(cls, BaseModel) @@ -101,7 +101,7 @@ def is_basemodel_subclass(cls: type) -> bool: * pydantic.v1.BaseModel in Pydantic 2.x Returns: - True if the given class is a subclass of Pydantic `BaseModel`. + `True` if the given class is a subclass of Pydantic `BaseModel`. """ # Before we can use issubclass on the cls we need to check if it is a class if not inspect.isclass(cls) or isinstance(cls, GenericAlias): @@ -119,7 +119,7 @@ def is_basemodel_instance(obj: Any) -> bool: * pydantic.v1.BaseModel in Pydantic 2.x Returns: - True if the given class is an instance of Pydantic `BaseModel`. + `True` if the given class is an instance of Pydantic `BaseModel`. 
""" return isinstance(obj, (BaseModel, BaseModelV1)) @@ -206,7 +206,7 @@ def _create_subset_model_v1( descriptions: dict | None = None, fn_description: str | None = None, ) -> type[BaseModel]: - """Create a pydantic model with only a subset of model's fields.""" + """Create a Pydantic model with only a subset of model's fields.""" fields = {} for field_name in field_names: @@ -235,7 +235,7 @@ def _create_subset_model_v2( descriptions: dict | None = None, fn_description: str | None = None, ) -> type[BaseModel]: - """Create a pydantic model with a subset of the model fields.""" + """Create a Pydantic model with a subset of the model fields.""" descriptions_ = descriptions or {} fields = {} for field_name in field_names: @@ -438,9 +438,9 @@ def create_model( /, **field_definitions: Any, ) -> type[BaseModel]: - """Create a pydantic model with the given field definitions. + """Create a Pydantic model with the given field definitions. - Please use create_model_v2 instead of this function. + Please use `create_model_v2` instead of this function. Args: model_name: The name of the model. @@ -511,7 +511,7 @@ def create_model_v2( field_definitions: dict[str, Any] | None = None, root: Any | None = None, ) -> type[BaseModel]: - """Create a pydantic model with the given field definitions. + """Create a Pydantic model with the given field definitions. Attention: Please do not use outside of langchain packages. This API @@ -522,7 +522,7 @@ def create_model_v2( module_name: The name of the module where the model is defined. This is used by Pydantic to resolve any forward references. field_definitions: The field definitions for the model. - root: Type for a root model (RootModel) + root: Type for a root model (`RootModel`) Returns: The created model. diff --git a/libs/core/langchain_core/utils/strings.py b/libs/core/langchain_core/utils/strings.py index 86f7499dcc6..9326bc0662f 100644 --- a/libs/core/langchain_core/utils/strings.py +++ b/libs/core/langchain_core/utils/strings.py @@ -30,10 +30,7 @@ def stringify_dict(data: dict) -> str: Returns: The stringified dictionary. """ - text = "" - for key, value in data.items(): - text += key + ": " + stringify_value(value) + "\n" - return text + return "".join(f"{key}: {stringify_value(value)}\n" for key, value in data.items()) def comma_list(items: list[Any]) -> str: @@ -57,7 +54,7 @@ def sanitize_for_postgres(text: str, replacement: str = "") -> str: Args: text: The text to sanitize. - replacement: String to replace NUL bytes with. Defaults to empty string. + replacement: String to replace NUL bytes with. Returns: The sanitized text with NUL bytes removed or replaced. diff --git a/libs/core/langchain_core/utils/utils.py b/libs/core/langchain_core/utils/utils.py index 3d9c2a838bf..cb22c049dd8 100644 --- a/libs/core/langchain_core/utils/utils.py +++ b/libs/core/langchain_core/utils/utils.py @@ -123,8 +123,8 @@ def guard_import( Args: module_name: The name of the module to import. - pip_name: The name of the module to install with pip. Defaults to `None`. - package: The package to import the module from. Defaults to `None`. + pip_name: The name of the module to install with pip. + package: The package to import the module from. Returns: The imported module. @@ -155,11 +155,11 @@ def check_package_version( Args: package: The name of the package. - lt_version: The version must be less than this. Defaults to `None`. - lte_version: The version must be less than or equal to this. Defaults to `None`. - gt_version: The version must be greater than this. 
Defaults to `None`. + lt_version: The version must be less than this. + lte_version: The version must be less than or equal to this. + gt_version: The version must be greater than this. gte_version: The version must be greater than or equal to this. - Defaults to `None`. + Raises: ValueError: If the package version does not meet the requirements. @@ -218,7 +218,7 @@ def _build_model_kwargs( values: dict[str, Any], all_required_field_names: set[str], ) -> dict[str, Any]: - """Build "model_kwargs" param from Pydantic constructor values. + """Build `model_kwargs` param from Pydantic constructor values. Args: values: All init args passed in by user. @@ -228,8 +228,8 @@ def _build_model_kwargs( Extra kwargs. Raises: - ValueError: If a field is specified in both values and extra_kwargs. - ValueError: If a field is specified in model_kwargs. + ValueError: If a field is specified in both `values` and `extra_kwargs`. + ValueError: If a field is specified in `model_kwargs`. """ extra_kwargs = values.get("model_kwargs", {}) for field_name in list(values): @@ -267,6 +267,10 @@ def build_extra_kwargs( ) -> dict[str, Any]: """Build extra kwargs from values and extra_kwargs. + !!! danger "DON'T USE" + Kept for backwards-compatibility but should never have been public. Use the + internal `_build_model_kwargs` function instead. + Args: extra_kwargs: Extra kwargs passed in by user. values: Values passed in by user. @@ -276,9 +280,10 @@ def build_extra_kwargs( Extra kwargs. Raises: - ValueError: If a field is specified in both values and extra_kwargs. - ValueError: If a field is specified in model_kwargs. + ValueError: If a field is specified in both `values` and `extra_kwargs`. + ValueError: If a field is specified in `model_kwargs`. """ + # DON'T USE! Kept for backwards-compatibility but should never have been public. for field_name in list(values): if field_name in extra_kwargs: msg = f"Found {field_name} supplied twice." @@ -292,6 +297,7 @@ def build_extra_kwargs( ) extra_kwargs[field_name] = values.pop(field_name) + # DON'T USE! Kept for backwards-compatibility but should never have been public. invalid_model_kwargs = all_required_field_names.intersection(extra_kwargs.keys()) if invalid_model_kwargs: msg = ( @@ -300,6 +306,7 @@ def build_extra_kwargs( ) raise ValueError(msg) + # DON'T USE! Kept for backwards-compatibility but should never have been public. return extra_kwargs diff --git a/libs/core/langchain_core/vectorstores/base.py b/libs/core/langchain_core/vectorstores/base.py index fa15a3018aa..3580f5e9cde 100644 --- a/libs/core/langchain_core/vectorstores/base.py +++ b/libs/core/langchain_core/vectorstores/base.py @@ -52,22 +52,22 @@ class VectorStore(ABC): ids: list[str] | None = None, **kwargs: Any, ) -> list[str]: - """Run more texts through the embeddings and add to the vectorstore. + """Run more texts through the embeddings and add to the `VectorStore`. Args: - texts: Iterable of strings to add to the vectorstore. + texts: Iterable of strings to add to the `VectorStore`. metadatas: Optional list of metadatas associated with the texts. ids: Optional list of IDs associated with the texts. - **kwargs: vectorstore specific parameters. + **kwargs: `VectorStore` specific parameters. One of the kwargs should be `ids` which is a list of ids associated with the texts. Returns: - List of ids from adding the texts into the vectorstore. + List of IDs from adding the texts into the `VectorStore`. Raises: ValueError: If the number of metadatas does not match the number of texts. 
- ValueError: If the number of ids does not match the number of texts. + ValueError: If the number of IDs does not match the number of texts. """ if type(self).add_documents != VectorStore.add_documents: # This condition is triggered if the subclass has provided @@ -109,11 +109,12 @@ class VectorStore(ABC): """Delete by vector ID or other criteria. Args: - ids: List of ids to delete. If `None`, delete all. Default is None. + ids: List of IDs to delete. If `None`, delete all. **kwargs: Other keyword arguments that subclasses might use. Returns: - True if deletion is successful, False otherwise, None if not implemented. + `True` if deletion is successful, `False` otherwise, `None` if not + implemented. """ msg = "delete method must be implemented by subclass." raise NotImplementedError(msg) @@ -135,12 +136,10 @@ class VectorStore(ABC): some IDs. Args: - ids: List of ids to retrieve. + ids: List of IDs to retrieve. Returns: - List of Documents. - - !!! version-added "Added in version 0.2.11" + List of `Document` objects. """ msg = f"{self.__class__.__name__} does not yet support get_by_ids." raise NotImplementedError(msg) @@ -163,12 +162,10 @@ class VectorStore(ABC): some IDs. Args: - ids: List of ids to retrieve. + ids: List of IDs to retrieve. Returns: - List of Documents. - - !!! version-added "Added in version 0.2.11" + List of `Document` objects. """ return await run_in_executor(None, self.get_by_ids, ids) @@ -176,11 +173,12 @@ class VectorStore(ABC): """Async delete by vector ID or other criteria. Args: - ids: List of ids to delete. If `None`, delete all. Default is None. + ids: List of IDs to delete. If `None`, delete all. **kwargs: Other keyword arguments that subclasses might use. Returns: - True if deletion is successful, False otherwise, None if not implemented. + `True` if deletion is successful, `False` otherwise, `None` if not + implemented. """ return await run_in_executor(None, self.delete, ids, **kwargs) @@ -192,21 +190,20 @@ class VectorStore(ABC): ids: list[str] | None = None, **kwargs: Any, ) -> list[str]: - """Async run more texts through the embeddings and add to the vectorstore. + """Async run more texts through the embeddings and add to the `VectorStore`. Args: - texts: Iterable of strings to add to the vectorstore. + texts: Iterable of strings to add to the `VectorStore`. metadatas: Optional list of metadatas associated with the texts. - Default is None. ids: Optional list - **kwargs: vectorstore specific parameters. + **kwargs: `VectorStore` specific parameters. Returns: - List of ids from adding the texts into the vectorstore. + List of IDs from adding the texts into the `VectorStore`. Raises: ValueError: If the number of metadatas does not match the number of texts. - ValueError: If the number of ids does not match the number of texts. + ValueError: If the number of IDs does not match the number of texts. """ if ids is not None: # For backward compatibility @@ -235,13 +232,14 @@ class VectorStore(ABC): return await run_in_executor(None, self.add_texts, texts, metadatas, **kwargs) def add_documents(self, documents: list[Document], **kwargs: Any) -> list[str]: - """Add or update documents in the vectorstore. + """Add or update documents in the `VectorStore`. Args: - documents: Documents to add to the vectorstore. + documents: Documents to add to the `VectorStore`. **kwargs: Additional keyword arguments. - if kwargs contains ids and documents contain ids, - the ids in the kwargs will receive precedence. 
+ + If kwargs contains IDs and documents contain ids, the IDs in the kwargs + will receive precedence. Returns: List of IDs of the added texts. @@ -267,10 +265,10 @@ class VectorStore(ABC): async def aadd_documents( self, documents: list[Document], **kwargs: Any ) -> list[str]: - """Async run more documents through the embeddings and add to the vectorstore. + """Async run more documents through the embeddings and add to the `VectorStore`. Args: - documents: Documents to add to the vectorstore. + documents: Documents to add to the `VectorStore`. **kwargs: Additional keyword arguments. Returns: @@ -296,17 +294,17 @@ class VectorStore(ABC): """Return docs most similar to query using a specified search type. Args: - query: Input text - search_type: Type of search to perform. Can be "similarity", - "mmr", or "similarity_score_threshold". + query: Input text. + search_type: Type of search to perform. Can be `'similarity'`, `'mmr'`, or + `'similarity_score_threshold'`. **kwargs: Arguments to pass to the search method. Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. Raises: - ValueError: If search_type is not one of "similarity", - "mmr", or "similarity_score_threshold". + ValueError: If `search_type` is not one of `'similarity'`, + `'mmr'`, or `'similarity_score_threshold'`. """ if search_type == "similarity": return self.similarity_search(query, **kwargs) @@ -331,16 +329,16 @@ class VectorStore(ABC): Args: query: Input text. - search_type: Type of search to perform. Can be "similarity", - "mmr", or "similarity_score_threshold". + search_type: Type of search to perform. Can be `'similarity'`, `'mmr'`, or + `'similarity_score_threshold'`. **kwargs: Arguments to pass to the search method. Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. Raises: - ValueError: If search_type is not one of "similarity", - "mmr", or "similarity_score_threshold". + ValueError: If `search_type` is not one of `'similarity'`, + `'mmr'`, or `'similarity_score_threshold'`. """ if search_type == "similarity": return await self.asimilarity_search(query, **kwargs) @@ -365,11 +363,11 @@ class VectorStore(ABC): Args: query: Input text. - k: Number of Documents to return. Defaults to 4. + k: Number of `Document` objects to return. **kwargs: Arguments to pass to the search method. Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ @staticmethod @@ -424,7 +422,7 @@ class VectorStore(ABC): **kwargs: Arguments to pass to the search method. Returns: - List of Tuples of (doc, similarity_score). + List of tuples of `(doc, similarity_score)`. """ raise NotImplementedError @@ -438,7 +436,7 @@ class VectorStore(ABC): **kwargs: Arguments to pass to the search method. Returns: - List of Tuples of (doc, similarity_score). + List of tuples of `(doc, similarity_score)`. """ # This is a temporary workaround to make the similarity search # asynchronous. The proper solution is to make the similarity search @@ -456,19 +454,19 @@ class VectorStore(ABC): """Default similarity search with relevance scores. Modify if necessary in subclass. - Return docs and relevance scores in the range [0, 1]. + Return docs and relevance scores in the range `[0, 1]`. - 0 is dissimilar, 1 is most similar. + `0` is dissimilar, `1` is most similar. Args: query: Input text. - k: Number of Documents to return. Defaults to 4. - **kwargs: kwargs to be passed to similarity search. 
Should include: - score_threshold: Optional, a floating point value between 0 to 1 to - filter the resulting set of retrieved docs + k: Number of `Document` objects to return. + **kwargs: kwargs to be passed to similarity search. Should include + `score_threshold`, An optional floating point value between `0` to `1` + to filter the resulting set of retrieved docs Returns: - List of Tuples of (doc, similarity_score) + List of tuples of `(doc, similarity_score)` """ relevance_score_fn = self._select_relevance_score_fn() docs_and_scores = self.similarity_search_with_score(query, k, **kwargs) @@ -483,19 +481,19 @@ class VectorStore(ABC): """Default similarity search with relevance scores. Modify if necessary in subclass. - Return docs and relevance scores in the range [0, 1]. + Return docs and relevance scores in the range `[0, 1]`. - 0 is dissimilar, 1 is most similar. + `0` is dissimilar, `1` is most similar. Args: query: Input text. - k: Number of Documents to return. Defaults to 4. - **kwargs: kwargs to be passed to similarity search. Should include: - score_threshold: Optional, a floating point value between 0 to 1 to - filter the resulting set of retrieved docs + k: Number of `Document` objects to return. + **kwargs: kwargs to be passed to similarity search. Should include + `score_threshold`, An optional floating point value between `0` to `1` + to filter the resulting set of retrieved docs Returns: - List of Tuples of (doc, similarity_score) + List of tuples of `(doc, similarity_score)` """ relevance_score_fn = self._select_relevance_score_fn() docs_and_scores = await self.asimilarity_search_with_score(query, k, **kwargs) @@ -507,19 +505,19 @@ class VectorStore(ABC): k: int = 4, **kwargs: Any, ) -> list[tuple[Document, float]]: - """Return docs and relevance scores in the range [0, 1]. + """Return docs and relevance scores in the range `[0, 1]`. - 0 is dissimilar, 1 is most similar. + `0` is dissimilar, `1` is most similar. Args: query: Input text. - k: Number of Documents to return. Defaults to 4. - **kwargs: kwargs to be passed to similarity search. Should include: - score_threshold: Optional, a floating point value between 0 to 1 to - filter the resulting set of retrieved docs. + k: Number of `Document` objects to return. + **kwargs: kwargs to be passed to similarity search. Should include + `score_threshold`, An optional floating point value between `0` to `1` + to filter the resulting set of retrieved docs Returns: - List of Tuples of (doc, similarity_score). + List of tuples of `(doc, similarity_score)`. """ score_threshold = kwargs.pop("score_threshold", None) @@ -556,19 +554,19 @@ class VectorStore(ABC): k: int = 4, **kwargs: Any, ) -> list[tuple[Document, float]]: - """Async return docs and relevance scores in the range [0, 1]. + """Async return docs and relevance scores in the range `[0, 1]`. - 0 is dissimilar, 1 is most similar. + `0` is dissimilar, `1` is most similar. Args: query: Input text. - k: Number of Documents to return. Defaults to 4. - **kwargs: kwargs to be passed to similarity search. Should include: - score_threshold: Optional, a floating point value between 0 to 1 to - filter the resulting set of retrieved docs + k: Number of `Document` objects to return. + **kwargs: kwargs to be passed to similarity search. 
Should include + `score_threshold`, An optional floating point value between `0` to `1` + to filter the resulting set of retrieved docs Returns: - List of Tuples of (doc, similarity_score) + List of tuples of `(doc, similarity_score)` """ score_threshold = kwargs.pop("score_threshold", None) @@ -606,11 +604,11 @@ class VectorStore(ABC): Args: query: Input text. - k: Number of Documents to return. Defaults to 4. + k: Number of `Document` objects to return. **kwargs: Arguments to pass to the search method. Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ # This is a temporary workaround to make the similarity search # asynchronous. The proper solution is to make the similarity search @@ -624,11 +622,11 @@ class VectorStore(ABC): Args: embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of `Document` objects to return. **kwargs: Arguments to pass to the search method. Returns: - List of Documents most similar to the query vector. + List of `Document` objects most similar to the query vector. """ raise NotImplementedError @@ -639,11 +637,11 @@ class VectorStore(ABC): Args: embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of `Document` objects to return. **kwargs: Arguments to pass to the search method. Returns: - List of Documents most similar to the query vector. + List of `Document` objects most similar to the query vector. """ # This is a temporary workaround to make the similarity search # asynchronous. The proper solution is to make the similarity search @@ -667,17 +665,15 @@ class VectorStore(ABC): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Default is 20. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. + k: Number of `Document` objects to return. + fetch_k: Number of `Document` objects to fetch to pass to MMR algorithm. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. **kwargs: Arguments to pass to the search method. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ raise NotImplementedError @@ -696,17 +692,15 @@ class VectorStore(ABC): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Default is 20. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. + k: Number of `Document` objects to return. + fetch_k: Number of `Document` objects to fetch to pass to MMR algorithm. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. **kwargs: Arguments to pass to the search method. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. 
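A compact sketch of the `VectorStore` surface touched above, using the in-memory implementation and the deterministic fake embedding model shipped with `langchain-core` (swap in a real embedding model in practice; document texts and IDs are invented):

```python
from langchain_core.documents import Document
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_core.vectorstores import InMemoryVectorStore

store = InMemoryVectorStore(embedding=DeterministicFakeEmbedding(size=32))
store.add_documents(
    [
        Document(page_content="LangChain composes LLM applications", id="doc-1"),
        Document(page_content="Vector stores index embedded documents", id="doc-2"),
    ]
)

print(store.get_by_ids(["doc-1"]))
for doc, score in store.similarity_search_with_score("vector indexes", k=1):
    print(score, doc.page_content)

# Maximal marginal relevance trades off similarity against result diversity.
print(store.max_marginal_relevance_search("vector indexes", k=1, fetch_k=2))
```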
""" # This is a temporary workaround to make the similarity search # asynchronous. The proper solution is to make the similarity search @@ -736,17 +730,15 @@ class VectorStore(ABC): Args: embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Default is 20. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. + k: Number of `Document` objects to return. + fetch_k: Number of `Document` objects to fetch to pass to MMR algorithm. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. **kwargs: Arguments to pass to the search method. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ raise NotImplementedError @@ -765,17 +757,15 @@ class VectorStore(ABC): Args: embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Default is 20. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. + k: Number of `Document` objects to return. + fetch_k: Number of `Document` objects to fetch to pass to MMR algorithm. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. **kwargs: Arguments to pass to the search method. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ return await run_in_executor( None, @@ -794,15 +784,15 @@ class VectorStore(ABC): embedding: Embeddings, **kwargs: Any, ) -> Self: - """Return VectorStore initialized from documents and embeddings. + """Return `VectorStore` initialized from documents and embeddings. Args: - documents: List of Documents to add to the vectorstore. + documents: List of `Document` objects to add to the `VectorStore`. embedding: Embedding function to use. **kwargs: Additional keyword arguments. Returns: - VectorStore initialized from documents and embeddings. + `VectorStore` initialized from documents and embeddings. """ texts = [d.page_content for d in documents] metadatas = [d.metadata for d in documents] @@ -824,15 +814,15 @@ class VectorStore(ABC): embedding: Embeddings, **kwargs: Any, ) -> Self: - """Async return VectorStore initialized from documents and embeddings. + """Async return `VectorStore` initialized from documents and embeddings. Args: - documents: List of Documents to add to the vectorstore. + documents: List of `Document` objects to add to the `VectorStore`. embedding: Embedding function to use. **kwargs: Additional keyword arguments. Returns: - VectorStore initialized from documents and embeddings. + `VectorStore` initialized from documents and embeddings. """ texts = [d.page_content for d in documents] metadatas = [d.metadata for d in documents] @@ -858,18 +848,17 @@ class VectorStore(ABC): ids: list[str] | None = None, **kwargs: Any, ) -> VST: - """Return VectorStore initialized from texts and embeddings. 
+ """Return `VectorStore` initialized from texts and embeddings. Args: - texts: Texts to add to the vectorstore. + texts: Texts to add to the `VectorStore`. embedding: Embedding function to use. metadatas: Optional list of metadatas associated with the texts. - Default is None. ids: Optional list of IDs associated with the texts. **kwargs: Additional keyword arguments. Returns: - VectorStore initialized from texts and embeddings. + `VectorStore` initialized from texts and embeddings. """ @classmethod @@ -882,18 +871,17 @@ class VectorStore(ABC): ids: list[str] | None = None, **kwargs: Any, ) -> Self: - """Async return VectorStore initialized from texts and embeddings. + """Async return `VectorStore` initialized from texts and embeddings. Args: - texts: Texts to add to the vectorstore. + texts: Texts to add to the `VectorStore`. embedding: Embedding function to use. metadatas: Optional list of metadatas associated with the texts. - Default is None. ids: Optional list of IDs associated with the texts. **kwargs: Additional keyword arguments. Returns: - VectorStore initialized from texts and embeddings. + `VectorStore` initialized from texts and embeddings. """ if ids is not None: kwargs["ids"] = ids @@ -909,27 +897,29 @@ class VectorStore(ABC): return tags def as_retriever(self, **kwargs: Any) -> VectorStoreRetriever: - """Return VectorStoreRetriever initialized from this VectorStore. + """Return `VectorStoreRetriever` initialized from this `VectorStore`. Args: **kwargs: Keyword arguments to pass to the search function. Can include: - search_type: Defines the type of search that the Retriever should - perform. Can be "similarity" (default), "mmr", or - "similarity_score_threshold". - search_kwargs: Keyword arguments to pass to the search function. Can + + * `search_type`: Defines the type of search that the Retriever should + perform. Can be `'similarity'` (default), `'mmr'`, or + `'similarity_score_threshold'`. + * `search_kwargs`: Keyword arguments to pass to the search function. Can include things like: - k: Amount of documents to return (Default: 4) - score_threshold: Minimum relevance threshold - for similarity_score_threshold - fetch_k: Amount of documents to pass to MMR algorithm - (Default: 20) - lambda_mult: Diversity of results returned by MMR; - 1 for minimum diversity and 0 for maximum. (Default: 0.5) - filter: Filter by document metadata + + * `k`: Amount of documents to return (Default: `4`) + * `score_threshold`: Minimum relevance threshold + for `similarity_score_threshold` + * `fetch_k`: Amount of documents to pass to MMR algorithm + (Default: `20`) + * `lambda_mult`: Diversity of results returned by MMR; + `1` for minimum diversity and 0 for maximum. (Default: `0.5`) + * `filter`: Filter by document metadata Returns: - Retriever class for VectorStore. + Retriever class for `VectorStore`. Examples: ```python @@ -969,7 +959,7 @@ class VectorStoreRetriever(BaseRetriever): vectorstore: VectorStore """VectorStore to use for retrieval.""" search_type: str = "similarity" - """Type of search to perform. Defaults to "similarity".""" + """Type of search to perform.""" search_kwargs: dict = Field(default_factory=dict) """Keyword arguments to pass to the search function.""" allowed_search_types: ClassVar[Collection[str]] = ( @@ -994,8 +984,8 @@ class VectorStoreRetriever(BaseRetriever): Validated values. Raises: - ValueError: If search_type is not one of the allowed search types. 
- ValueError: If score_threshold is not specified with a float value(0~1) + ValueError: If `search_type` is not one of the allowed search types. + ValueError: If `score_threshold` is not specified with a float value(`0~1`) """ search_type = values.get("search_type", "similarity") if search_type not in cls.allowed_search_types: @@ -1083,10 +1073,10 @@ class VectorStoreRetriever(BaseRetriever): return docs def add_documents(self, documents: list[Document], **kwargs: Any) -> list[str]: - """Add documents to the vectorstore. + """Add documents to the `VectorStore`. Args: - documents: Documents to add to the vectorstore. + documents: Documents to add to the `VectorStore`. **kwargs: Other keyword arguments that subclasses might use. Returns: @@ -1097,10 +1087,10 @@ class VectorStoreRetriever(BaseRetriever): async def aadd_documents( self, documents: list[Document], **kwargs: Any ) -> list[str]: - """Async add documents to the vectorstore. + """Async add documents to the `VectorStore`. Args: - documents: Documents to add to the vectorstore. + documents: Documents to add to the `VectorStore`. **kwargs: Other keyword arguments that subclasses might use. Returns: diff --git a/libs/core/langchain_core/vectorstores/in_memory.py b/libs/core/langchain_core/vectorstores/in_memory.py index 91b5c2c243f..cb8a9b19857 100644 --- a/libs/core/langchain_core/vectorstores/in_memory.py +++ b/libs/core/langchain_core/vectorstores/in_memory.py @@ -257,10 +257,10 @@ class InMemoryVectorStore(VectorStore): """Get documents by their ids. Args: - ids: The ids of the documents to get. + ids: The IDs of the documents to get. Returns: - A list of Document objects. + A list of `Document` objects. """ documents = [] @@ -281,10 +281,10 @@ class InMemoryVectorStore(VectorStore): """Async get documents by their ids. Args: - ids: The ids of the documents to get. + ids: The IDs of the documents to get. Returns: - A list of Document objects. + A list of `Document` objects. """ return self.get_by_ids(ids) diff --git a/libs/core/langchain_core/vectorstores/utils.py b/libs/core/langchain_core/vectorstores/utils.py index ca46e638223..855af9211e7 100644 --- a/libs/core/langchain_core/vectorstores/utils.py +++ b/libs/core/langchain_core/vectorstores/utils.py @@ -1,4 +1,4 @@ -"""Internal utilities for the in memory implementation of VectorStore. +"""Internal utilities for the in memory implementation of `VectorStore`. These are part of a private API, and users should not use them directly as they can change without notice. @@ -112,8 +112,8 @@ def maximal_marginal_relevance( Args: query_embedding: The query embedding. embedding_list: A list of embeddings. - lambda_mult: The lambda parameter for MMR. Default is 0.5. - k: The number of embeddings to return. Default is 4. + lambda_mult: The lambda parameter for MMR. + k: The number of embeddings to return. Returns: A list of indices of the embeddings to return. 
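The docstring changes above describe the retriever search knobs (`search_type`, `k`, `fetch_k`, `lambda_mult`, `score_threshold`) without showing them in use. A minimal sketch of how they come together via `as_retriever` — illustrative only, not part of this diff, assuming `langchain-core` and `numpy` are installed and using `DeterministicFakeEmbedding` purely as a stand-in for a real embedding model:

```python
# Illustrative sketch (not part of the diff): exercising the documented
# search parameters against the in-memory vector store.
from langchain_core.documents import Document
from langchain_core.embeddings import DeterministicFakeEmbedding  # stand-in embeddings
from langchain_core.vectorstores import InMemoryVectorStore

store = InMemoryVectorStore.from_documents(
    [
        Document(page_content="Apples are red."),
        Document(page_content="Apples are a kind of fruit."),
        Document(page_content="The sky is blue."),
    ],
    embedding=DeterministicFakeEmbedding(size=16),
)

# MMR: fetch_k candidates are retrieved, then re-ranked so that lambda_mult
# trades off relevance (1 = minimum diversity) against diversity (0 = maximum).
retriever = store.as_retriever(
    search_type="mmr",
    search_kwargs={"k": 2, "fetch_k": 10, "lambda_mult": 0.5},
)
docs = retriever.invoke("What color are apples?")
print([d.page_content for d in docs])
```

`score_threshold` applies analogously with `search_type="similarity_score_threshold"`, provided the underlying store implements relevance scoring.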
diff --git a/libs/core/langchain_core/version.py b/libs/core/langchain_core/version.py index cf53d6cf6ee..611a108116a 100644 --- a/libs/core/langchain_core/version.py +++ b/libs/core/langchain_core/version.py @@ -1,3 +1,3 @@ """langchain-core version information and utilities.""" -VERSION = "1.0.0a8" +VERSION = "1.0.3" diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index 2fa85a31c84..6fb85621b5b 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -3,8 +3,13 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-core" +description = "Building applications with LLMs through composability" license = {text = "MIT"} +readme = "README.md" +authors = [] + +version = "1.0.3" requires-python = ">=3.10.0,<4.0.0" dependencies = [ "langsmith>=0.3.45,<1.0.0", @@ -15,18 +20,15 @@ dependencies = [ "packaging>=23.2.0,<26.0.0", "pydantic>=2.7.4,<3.0.0", ] -name = "langchain-core" -version = "1.0.0a8" -description = "Building applications with LLMs through composability" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/core" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-core%3D%3D1%22" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/" +Documentation = "https://reference.langchain.com/python/langchain_core/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/core" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-core%3D%3D1%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] lint = ["ruff>=0.13.1,<0.14.0"] @@ -34,6 +36,7 @@ typing = [ "mypy>=1.18.1,<1.19.0", "types-pyyaml>=6.0.12.2,<7.0.0.0", "types-requests>=2.28.11.5,<3.0.0.0", + "langchain-model-profiles", "langchain-text-splitters", ] dev = [ @@ -55,6 +58,7 @@ test = [ "blockbuster>=1.5.18,<1.6.0", "numpy>=1.26.4; python_version<'3.13'", "numpy>=2.1.0; python_version>='3.13'", + "langchain-model-profiles", "langchain-tests", "pytest-benchmark", "pytest-codspeed", @@ -62,6 +66,7 @@ test = [ test_integration = [] [tool.uv.sources] +langchain-model-profiles = { path = "../model-profiles" } langchain-tests = { path = "../standard-tests" } langchain-text-splitters = { path = "../text-splitters" } @@ -100,7 +105,6 @@ ignore = [ "ANN401", # No Any types "BLE", # Blind exceptions "ERA", # No commented-out code - "PLR2004", # Comparison to magic number ] unfixable = [ "B028", # People should intentionally tune the stacklevel @@ -121,7 +125,7 @@ ignore-var-parameters = true # ignore missing documentation for *args and **kwa "langchain_core/utils/mustache.py" = [ "PLW0603",] "langchain_core/sys_info.py" = [ "T201",] "tests/unit_tests/test_tools.py" = [ "ARG",] -"tests/**" = [ "D1", "S", "SLF",] +"tests/**" = [ "D1", "PLR2004", "S", "SLF",] "scripts/**" = [ "INP", "S",] [tool.coverage.run] @@ -129,8 +133,10 @@ omit = [ "tests/*",] [tool.pytest.ini_options] addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5" -markers = [ "requires: mark tests as requiring a specific library", "compile: mark placeholder test used to compile integration tests without running them", ] +markers = [ + "requires: mark tests as requiring a specific 
library", + "compile: mark placeholder test used to compile integration tests without running them", +] asyncio_mode = "auto" -filterwarnings = [ "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning",] asyncio_default_fixture_loop_scope = "function" - +filterwarnings = [ "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning",] diff --git a/libs/core/tests/unit_tests/callbacks/test_async_callback_manager.py b/libs/core/tests/unit_tests/callbacks/test_async_callback_manager.py index e2cbaa9c723..32658f0f904 100644 --- a/libs/core/tests/unit_tests/callbacks/test_async_callback_manager.py +++ b/libs/core/tests/unit_tests/callbacks/test_async_callback_manager.py @@ -148,4 +148,65 @@ async def test_inline_handlers_share_parent_context_multiple() -> None: 2, 3, 3, - ], f"Expected order of states was broken due to context loss. Got {states}" + ] + + +async def test_shielded_callback_context_preservation() -> None: + """Verify that shielded callbacks preserve context variables. + + This test specifically addresses the issue where async callbacks decorated + with @shielded do not properly preserve context variables, breaking + instrumentation and other context-dependent functionality. + + The issue manifests in callbacks that use the @shielded decorator: + * on_llm_end + * on_llm_error + * on_chain_end + * on_chain_error + * And other shielded callback methods + """ + context_var: contextvars.ContextVar[str] = contextvars.ContextVar("test_context") + + class ContextTestHandler(AsyncCallbackHandler): + """Handler that reads context variables in shielded callbacks.""" + + def __init__(self) -> None: + self.run_inline = False + self.context_values: list[str] = [] + + @override + async def on_llm_end(self, response: Any, **kwargs: Any) -> None: + """This method is decorated with @shielded in the run manager.""" + # This should preserve the context variable value + self.context_values.append(context_var.get("not_found")) + + @override + async def on_chain_end(self, outputs: Any, **kwargs: Any) -> None: + """This method is decorated with @shielded in the run manager.""" + # This should preserve the context variable value + self.context_values.append(context_var.get("not_found")) + + # Set up the test context + context_var.set("test_value") + handler = ContextTestHandler() + manager = AsyncCallbackManager(handlers=[handler]) + + # Create run managers that have the shielded methods + llm_managers = await manager.on_llm_start({}, ["test prompt"]) + llm_run_manager = llm_managers[0] + + chain_run_manager = await manager.on_chain_start({}, {"test": "input"}) + + # Test LLM end callback (which is shielded) + await llm_run_manager.on_llm_end({"response": "test"}) # type: ignore[arg-type] + + # Test Chain end callback (which is shielded) + await chain_run_manager.on_chain_end({"output": "test"}) + + # The context should be preserved in shielded callbacks + # This was the main issue - shielded decorators were not preserving context + assert handler.context_values == ["test_value", "test_value"], ( + f"Expected context values ['test_value', 'test_value'], " + f"but got {handler.context_values}. " + f"This indicates the shielded decorator is not preserving context variables." 
+ ) diff --git a/libs/core/tests/unit_tests/indexing/test_hashed_document.py b/libs/core/tests/unit_tests/indexing/test_hashed_document.py index fd88391aa3a..0cabb28bbe1 100644 --- a/libs/core/tests/unit_tests/indexing/test_hashed_document.py +++ b/libs/core/tests/unit_tests/indexing/test_hashed_document.py @@ -33,7 +33,7 @@ def test_hashing() -> None: # hash should be deterministic assert hashed_document.id == "fd1dc827-051b-537d-a1fe-1fa043e8b276" - # Verify that hashing with sha1 is determinstic + # Verify that hashing with sha1 is deterministic another_hashed_document = _get_document_with_hash(document, key_encoder="sha1") assert another_hashed_document.id == hashed_document.id diff --git a/libs/core/tests/unit_tests/indexing/test_indexing.py b/libs/core/tests/unit_tests/indexing/test_indexing.py index a4baef198d7..f598048a98f 100644 --- a/libs/core/tests/unit_tests/indexing/test_indexing.py +++ b/libs/core/tests/unit_tests/indexing/test_indexing.py @@ -604,7 +604,7 @@ def test_incremental_fails_with_bad_source_ids( with pytest.raises( ValueError, - match="Source ids are required when cleanup mode is incremental or scoped_full", + match="Source IDs are required when cleanup mode is incremental or scoped_full", ): # Should raise an error because no source id function was specified index( @@ -654,7 +654,7 @@ async def test_aincremental_fails_with_bad_source_ids( with pytest.raises( ValueError, - match="Source ids are required when cleanup mode is incremental or scoped_full", + match="Source IDs are required when cleanup mode is incremental or scoped_full", ): # Should raise an error because no source id function was specified await aindex( @@ -956,7 +956,7 @@ def test_scoped_full_fails_with_bad_source_ids( with pytest.raises( ValueError, - match="Source ids are required when cleanup mode is incremental or scoped_full", + match="Source IDs are required when cleanup mode is incremental or scoped_full", ): # Should raise an error because no source id function was specified index( @@ -1006,7 +1006,7 @@ async def test_ascoped_full_fails_with_bad_source_ids( with pytest.raises( ValueError, - match="Source ids are required when cleanup mode is incremental or scoped_full", + match="Source IDs are required when cleanup mode is incremental or scoped_full", ): # Should raise an error because no source id function was specified await aindex( @@ -2801,7 +2801,7 @@ def test_index_with_upsert_kwargs( ] assert [doc.metadata for doc in args[0]] == [{"source": "1"}, {"source": "2"}] - # Check that ids are present + # Check that IDs are present assert "ids" in kwargs assert isinstance(kwargs["ids"], list) assert len(kwargs["ids"]) == 2 @@ -2932,7 +2932,7 @@ async def test_aindex_with_upsert_kwargs( ] assert [doc.metadata for doc in args[0]] == [{"source": "1"}, {"source": "2"}] - # Check that ids are present + # Check that IDs are present assert "ids" in kwargs assert isinstance(kwargs["ids"], list) assert len(kwargs["ids"]) == 2 diff --git a/libs/core/tests/unit_tests/language_models/chat_models/test_base.py b/libs/core/tests/unit_tests/language_models/chat_models/test_base.py index 3d8e03b5e75..b20623ad6fb 100644 --- a/libs/core/tests/unit_tests/language_models/chat_models/test_base.py +++ b/libs/core/tests/unit_tests/language_models/chat_models/test_base.py @@ -57,7 +57,7 @@ def _content_blocks_equal_ignore_id( expected: Expected content to compare against (string or list of blocks). Returns: - True if content matches (excluding `id` fields), False otherwise. 
+ True if content matches (excluding `id` fields), `False` otherwise. """ if isinstance(actual, str) or isinstance(expected, str): @@ -1217,3 +1217,20 @@ def test_get_ls_params() -> None: ls_params = llm._get_ls_params(stop=["stop"]) assert ls_params["ls_stop"] == ["stop"] + + +def test_model_profiles() -> None: + model = GenericFakeChatModel(messages=iter([])) + profile = model.profile + assert profile == {} + + class MyModel(GenericFakeChatModel): + model: str = "gpt-5" + + @property + def _llm_type(self) -> str: + return "openai-chat" + + model = MyModel(messages=iter([])) + profile = model.profile + assert profile diff --git a/libs/core/tests/unit_tests/language_models/chat_models/test_cache.py b/libs/core/tests/unit_tests/language_models/chat_models/test_cache.py index b246da593fa..667b2fba29b 100644 --- a/libs/core/tests/unit_tests/language_models/chat_models/test_cache.py +++ b/libs/core/tests/unit_tests/language_models/chat_models/test_cache.py @@ -26,11 +26,11 @@ class InMemoryCache(BaseCache): self._cache: dict[tuple[str, str], RETURN_VAL_TYPE] = {} def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None: - """Look up based on prompt and llm_string.""" + """Look up based on `prompt` and `llm_string`.""" return self._cache.get((prompt, llm_string), None) def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: - """Update cache based on prompt and llm_string.""" + """Update cache based on `prompt` and `llm_string`.""" self._cache[prompt, llm_string] = return_val @override diff --git a/libs/core/tests/unit_tests/language_models/llms/test_cache.py b/libs/core/tests/unit_tests/language_models/llms/test_cache.py index a0bd8a34b33..720c247a9c9 100644 --- a/libs/core/tests/unit_tests/language_models/llms/test_cache.py +++ b/libs/core/tests/unit_tests/language_models/llms/test_cache.py @@ -15,11 +15,11 @@ class InMemoryCache(BaseCache): self._cache: dict[tuple[str, str], RETURN_VAL_TYPE] = {} def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None: - """Look up based on prompt and llm_string.""" + """Look up based on `prompt` and `llm_string`.""" return self._cache.get((prompt, llm_string), None) def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: - """Update cache based on prompt and llm_string.""" + """Update cache based on `prompt` and `llm_string`.""" self._cache[prompt, llm_string] = return_val @override @@ -68,12 +68,12 @@ class InMemoryCacheBad(BaseCache): self._cache: dict[tuple[str, str], RETURN_VAL_TYPE] = {} def lookup(self, prompt: str, llm_string: str) -> RETURN_VAL_TYPE | None: - """Look up based on prompt and llm_string.""" + """Look up based on `prompt` and `llm_string`.""" msg = "This code should not be triggered" raise NotImplementedError(msg) def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: - """Update cache based on prompt and llm_string.""" + """Update cache based on `prompt` and `llm_string`.""" msg = "This code should not be triggered" raise NotImplementedError(msg) diff --git a/libs/core/tests/unit_tests/messages/test_ai.py b/libs/core/tests/unit_tests/messages/test_ai.py index d18a1e9b0a3..742f23b68be 100644 --- a/libs/core/tests/unit_tests/messages/test_ai.py +++ b/libs/core/tests/unit_tests/messages/test_ai.py @@ -1,7 +1,5 @@ from typing import cast -import pytest - from langchain_core.load import dumpd, load from langchain_core.messages import AIMessage, AIMessageChunk from langchain_core.messages import content as types @@ -358,6 
+356,8 @@ def test_content_blocks() -> None: # test v1 content chunk_1.content = cast("str | list[str | dict]", chunk_1.content_blocks) + assert len(chunk_1.content) == 1 + chunk_1.content[0]["extras"] = {"baz": "qux"} # type: ignore[index] chunk_1.response_metadata["output_version"] = "v1" chunk_2.content = cast("str | list[str | dict]", chunk_2.content_blocks) @@ -368,6 +368,7 @@ def test_content_blocks() -> None: "name": "foo", "args": {"foo": "bar"}, "id": "abc_123", + "extras": {"baz": "qux"}, } ] @@ -481,18 +482,6 @@ def test_content_blocks() -> None: ] -def test_provider_warns() -> None: - # Test that major providers warn if content block standardization is not yet - # implemented. - # This test should be removed when all major providers support content block - # standardization. - message = AIMessage("Hello.", response_metadata={"model_provider": "groq"}) - with pytest.warns(match="not yet fully supported for Groq"): - content_blocks = message.content_blocks - - assert content_blocks == [{"type": "text", "text": "Hello."}] - - def test_content_blocks_reasoning_extraction() -> None: """Test best-effort reasoning extraction from `additional_kwargs`.""" message = AIMessage( diff --git a/libs/core/tests/unit_tests/messages/test_imports.py b/libs/core/tests/unit_tests/messages/test_imports.py index 4999c74cdc7..263be79bfd3 100644 --- a/libs/core/tests/unit_tests/messages/test_imports.py +++ b/libs/core/tests/unit_tests/messages/test_imports.py @@ -55,6 +55,9 @@ EXPECTED_ALL = [ "convert_to_openai_data_block", "convert_to_openai_image_block", "convert_to_openai_messages", + "UsageMetadata", + "InputTokenDetails", + "OutputTokenDetails", ] diff --git a/libs/core/tests/unit_tests/output_parsers/test_base_parsers.py b/libs/core/tests/unit_tests/output_parsers/test_base_parsers.py index fa5e9c9c9c0..2013790aa05 100644 --- a/libs/core/tests/unit_tests/output_parsers/test_base_parsers.py +++ b/libs/core/tests/unit_tests/output_parsers/test_base_parsers.py @@ -25,7 +25,7 @@ def test_base_generation_parser() -> None: """Parse a list of model Generations into a specific format. Args: - result: A list of Generations to be parsed. The Generations are assumed + result: A list of `Generation` objects to be parsed. The Generations are assumed to be different candidate outputs for a single model input. Many parsers assume that only a single generation is passed it in. We will assert for that @@ -67,7 +67,7 @@ def test_base_transform_output_parser() -> None: """Parse a list of model Generations into a specific format. Args: - result: A list of Generations to be parsed. The Generations are assumed + result: A list of `Generation` objects to be parsed. The Generations are assumed to be different candidate outputs for a single model input. Many parsers assume that only a single generation is passed it in.
We will assert for that diff --git a/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py b/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py index 9bf3bd19489..2df7de35756 100644 --- a/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py +++ b/libs/core/tests/unit_tests/output_parsers/test_pydantic_parser.py @@ -1,5 +1,6 @@ """Test PydanticOutputParser.""" +import sys from enum import Enum from typing import Literal @@ -13,7 +14,7 @@ from langchain_core.language_models import ParrotFakeChatModel from langchain_core.output_parsers import PydanticOutputParser from langchain_core.output_parsers.json import JsonOutputParser from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.utils.pydantic import PydanticBaseModel, TBaseModel +from langchain_core.utils.pydantic import PydanticBaseModel, TypeBaseModel class ForecastV2(pydantic.BaseModel): @@ -22,15 +23,23 @@ class ForecastV2(pydantic.BaseModel): forecast: str -class ForecastV1(V1BaseModel): - temperature: int - f_or_c: Literal["F", "C"] - forecast: str +if sys.version_info < (3, 14): + + class ForecastV1(V1BaseModel): + temperature: int + f_or_c: Literal["F", "C"] + forecast: str + + _FORECAST_MODELS_TYPES = type[ForecastV2] | type[ForecastV1] + _FORECAST_MODELS = [ForecastV2, ForecastV1] +else: + _FORECAST_MODELS_TYPES = type[ForecastV2] + _FORECAST_MODELS = [ForecastV2] -@pytest.mark.parametrize("pydantic_object", [ForecastV2, ForecastV1]) +@pytest.mark.parametrize("pydantic_object", _FORECAST_MODELS) def test_pydantic_parser_chaining( - pydantic_object: type[ForecastV2] | type[ForecastV1], + pydantic_object: _FORECAST_MODELS_TYPES, ) -> None: prompt = PromptTemplate( template="""{{ @@ -53,8 +62,8 @@ def test_pydantic_parser_chaining( assert res.forecast == "Sunny" -@pytest.mark.parametrize("pydantic_object", [ForecastV2, ForecastV1]) -def test_pydantic_parser_validation(pydantic_object: TBaseModel) -> None: +@pytest.mark.parametrize("pydantic_object", _FORECAST_MODELS) +def test_pydantic_parser_validation(pydantic_object: TypeBaseModel) -> None: bad_prompt = PromptTemplate( template="""{{ "temperature": "oof", @@ -66,18 +75,16 @@ def test_pydantic_parser_validation(pydantic_object: TBaseModel) -> None: model = ParrotFakeChatModel() - parser: PydanticOutputParser[PydanticBaseModel] = PydanticOutputParser( - pydantic_object=pydantic_object - ) + parser = PydanticOutputParser[PydanticBaseModel](pydantic_object=pydantic_object) chain = bad_prompt | model | parser with pytest.raises(OutputParserException): chain.invoke({}) # JSON output parser tests -@pytest.mark.parametrize("pydantic_object", [ForecastV2, ForecastV1]) +@pytest.mark.parametrize("pydantic_object", _FORECAST_MODELS) def test_json_parser_chaining( - pydantic_object: TBaseModel, + pydantic_object: TypeBaseModel, ) -> None: prompt = PromptTemplate( template="""{{ @@ -185,6 +192,14 @@ def test_pydantic_output_parser_type_inference() -> None: } +@pytest.mark.parametrize("pydantic_object", _FORECAST_MODELS) +def test_format_instructions(pydantic_object: TypeBaseModel) -> None: + """Test format instructions.""" + parser = PydanticOutputParser[PydanticBaseModel](pydantic_object=pydantic_object) + instructions = parser.get_format_instructions() + assert "temperature" in instructions + + def test_format_instructions_preserves_language() -> None: """Test format instructions does not attempt to encode into ascii.""" description = ( diff --git a/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr 
b/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr index 282713e6f33..964aa8817b9 100644 --- a/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr +++ b/libs/core/tests/unit_tests/prompts/__snapshots__/test_chat.ambr @@ -3,14 +3,13 @@ dict({ '$defs': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -110,8 +109,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -231,7 +229,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -306,7 +303,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -381,7 +377,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -389,7 +384,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -453,7 +448,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -516,11 +510,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -604,7 +597,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -688,9 +680,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -806,7 +798,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -822,7 +816,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. 
@@ -910,7 +903,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -981,7 +973,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -1027,7 +1019,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -1105,7 +1097,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -1114,36 +1105,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -1226,7 +1215,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -1331,8 +1319,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ @@ -1424,14 +1417,13 @@ dict({ '$defs': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. 
This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -1531,8 +1523,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -1652,7 +1643,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -1727,7 +1717,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -1802,7 +1791,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -1810,7 +1798,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -1874,7 +1862,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -1937,11 +1924,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -2025,7 +2011,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -2109,9 +2094,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -2227,7 +2212,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -2243,7 +2230,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -2331,7 +2317,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -2402,7 +2387,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -2448,7 +2433,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. 
Chunks are only merged if their @@ -2526,7 +2511,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -2535,36 +2519,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -2647,7 +2629,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -2752,8 +2733,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ diff --git a/libs/core/tests/unit_tests/prompts/test_string.py b/libs/core/tests/unit_tests/prompts/test_string.py new file mode 100644 index 00000000000..96c573c72f2 --- /dev/null +++ b/libs/core/tests/unit_tests/prompts/test_string.py @@ -0,0 +1,32 @@ +import pytest +from packaging import version + +from langchain_core.prompts.string import mustache_schema +from langchain_core.utils.pydantic import PYDANTIC_VERSION + +PYDANTIC_VERSION_AT_LEAST_29 = version.parse("2.9") <= PYDANTIC_VERSION + + +@pytest.mark.skipif( + not PYDANTIC_VERSION_AT_LEAST_29, + reason=( + "Only test with most recent version of pydantic. " + "Pydantic introduced small fixes to generated JSONSchema on minor versions." 
+ ), +) +def test_mustache_schema_parent_child() -> None: + template = "{{x.y}} {{x}}" + expected = { + "$defs": { + "x": { + "properties": {"y": {"default": None, "title": "Y", "type": "string"}}, + "title": "x", + "type": "object", + } + }, + "properties": {"x": {"$ref": "#/$defs/x", "default": None}}, + "title": "PromptInput", + "type": "object", + } + actual = mustache_schema(template).model_json_schema() + assert expected == actual diff --git a/libs/core/tests/unit_tests/prompts/test_structured.py b/libs/core/tests/unit_tests/prompts/test_structured.py index 44c6a215ba9..a3568bd380f 100644 --- a/libs/core/tests/unit_tests/prompts/test_structured.py +++ b/libs/core/tests/unit_tests/prompts/test_structured.py @@ -26,7 +26,7 @@ def _fake_runnable( class FakeStructuredChatModel(FakeListChatModel): - """Fake ChatModel for testing purposes.""" + """Fake chat model for testing purposes.""" @override def with_structured_output( diff --git a/libs/core/tests/unit_tests/pydantic_utils.py b/libs/core/tests/unit_tests/pydantic_utils.py index 2c8036a129d..b8235415343 100644 --- a/libs/core/tests/unit_tests/pydantic_utils.py +++ b/libs/core/tests/unit_tests/pydantic_utils.py @@ -92,43 +92,36 @@ def _schema(obj: Any) -> dict: replace_all_of_with_ref(schema_) remove_all_none_default(schema_) + _remove_additionalproperties(schema_) _remove_enum(schema_) return schema_ -def _remove_additionalproperties_from_untyped_dicts(schema: dict) -> dict[str, Any]: +def _remove_additionalproperties(schema: dict) -> dict[str, Any]: """Remove `"additionalProperties": True` from dicts in the schema. Pydantic 2.11 and later versions include `"additionalProperties": True` when generating JSON schemas for dict properties with `Any` or `object` values. + + Pydantic 2.12 and later versions include `"additionalProperties": True` when + generating JSON schemas for `TypedDict`. """ + if isinstance(schema, dict): + if ( + schema.get("type") == "object" + and schema.get("additionalProperties") is True + ): + schema.pop("additionalProperties", None) - def _remove_dict_additional_props( - obj: dict[str, Any] | list[Any], *, inside_properties: bool = False - ) -> None: - if isinstance(obj, dict): - if ( - inside_properties - and obj.get("type") == "object" - and obj.get("additionalProperties") is True - ): - obj.pop("additionalProperties", None) + # Recursively scan children + for value in schema.values(): + _remove_additionalproperties(value) - # Recursively scan children - for key, value in obj.items(): - # We are "inside_properties" if the *current* key is "properties", - # or if we were already inside properties in the caller. 
- next_inside_properties = inside_properties or (key == "properties") - _remove_dict_additional_props( - value, inside_properties=next_inside_properties - ) + elif isinstance(schema, list): + for item in schema: + _remove_additionalproperties(item) - elif isinstance(obj, list): - for item in obj: - _remove_dict_additional_props(item, inside_properties=inside_properties) - - _remove_dict_additional_props(schema, inside_properties=False) return schema @@ -152,5 +145,5 @@ def _normalize_schema(obj: Any) -> dict[str, Any]: remove_all_none_default(data) replace_all_of_with_ref(data) _remove_enum(data) - _remove_additionalproperties_from_untyped_dicts(data) + _remove_additionalproperties(data) return data diff --git a/libs/core/tests/unit_tests/runnables/__snapshots__/test_graph.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_graph.ambr index b2d8068bfd8..0f788aeef95 100644 --- a/libs/core/tests/unit_tests/runnables/__snapshots__/test_graph.ambr +++ b/libs/core/tests/unit_tests/runnables/__snapshots__/test_graph.ambr @@ -427,14 +427,13 @@ 'data': dict({ '$defs': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -534,8 +533,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -655,7 +653,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -730,7 +727,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -805,7 +801,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -813,7 +808,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -877,7 +872,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -940,11 +934,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. 
Example: ```python @@ -1028,7 +1021,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -1112,9 +1104,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -1230,7 +1222,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -1246,7 +1240,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -1334,7 +1327,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -1405,7 +1397,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -1451,7 +1443,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -1529,7 +1521,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -1538,36 +1529,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. 
''', 'properties': dict({ @@ -1650,7 +1639,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -1755,8 +1743,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ diff --git a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr index 599ef032e39..ef6f6f8089b 100644 --- a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr +++ b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr @@ -1959,14 +1959,13 @@ dict({ '$defs': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -2065,8 +2064,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -2184,7 +2182,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -2258,7 +2255,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -2332,7 +2328,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -2340,7 +2335,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -2403,7 +2398,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -2465,11 +2459,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. 
Example: ```python @@ -2552,7 +2545,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -2635,9 +2627,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -2752,7 +2744,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -2768,7 +2762,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -2855,7 +2848,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -2925,7 +2917,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -2970,7 +2962,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -3047,7 +3039,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -3056,36 +3047,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. 
''', 'properties': dict({ @@ -3167,7 +3156,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -3271,8 +3259,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ @@ -3360,14 +3353,13 @@ dict({ '$defs': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -3466,8 +3458,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -3585,7 +3576,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -3659,7 +3649,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -3796,7 +3785,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -3804,7 +3792,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -3867,7 +3855,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -3929,11 +3916,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -4016,7 +4002,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -4099,9 +4084,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -4216,7 +4201,9 @@ } ``` - !!! 
version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -4251,7 +4238,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -4338,7 +4324,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -4408,7 +4393,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -4453,7 +4438,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -4530,7 +4515,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -4539,36 +4523,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -4650,7 +4632,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -4754,8 +4735,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. 
''', 'properties': dict({ 'input_token_details': dict({ @@ -4855,14 +4841,13 @@ ]), 'definitions': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -4961,8 +4946,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -5080,7 +5064,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -5154,7 +5137,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -5291,7 +5273,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -5299,7 +5280,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -5362,7 +5343,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -5424,11 +5404,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -5511,7 +5490,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -5594,9 +5572,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -5711,7 +5689,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -5746,7 +5726,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -5833,7 +5812,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -5903,7 +5881,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. 
+ Represents an AI's request to call a tool. Example: ```python @@ -5948,7 +5926,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -6025,7 +6003,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -6034,36 +6011,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -6145,7 +6120,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -6249,8 +6223,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ @@ -6288,14 +6267,13 @@ dict({ 'definitions': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. 
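The chunk-merging behavior described for `AIMessageChunk` and `ToolCallChunk` in the snapshot above can be sketched as follows (the partial JSON fragments and IDs are invented for illustration): adding two chunks concatenates their string attributes, and tool call chunks are only merged when their `index` values are equal.

```python
from langchain_core.messages import AIMessageChunk

left = AIMessageChunk(
    content="",
    tool_call_chunks=[
        {"name": "get_weather", "args": '{"city": "Pa', "id": "call_1", "index": 0}
    ],
)
right = AIMessageChunk(
    content="",
    tool_call_chunks=[{"name": None, "args": 'ris"}', "id": None, "index": 0}],
)

# __add__ concatenates string fields of chunks that share the same index.
merged = left + right
print(merged.tool_call_chunks[0]["args"])  # '{"city": "Paris"}'
```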
''', 'properties': dict({ @@ -6394,8 +6372,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -6513,7 +6490,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -6587,7 +6563,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -6661,7 +6636,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -6669,7 +6643,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -6732,7 +6706,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -6794,11 +6767,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -6881,7 +6853,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -6964,9 +6935,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -7081,7 +7052,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -7097,7 +7070,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -7184,7 +7156,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -7254,7 +7225,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -7299,7 +7270,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -7376,7 +7347,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. 
@@ -7385,36 +7355,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -7496,7 +7464,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -7600,8 +7567,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ @@ -7731,14 +7703,13 @@ ]), 'definitions': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -7837,8 +7808,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -7956,7 +7926,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. 
role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -8030,7 +7999,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -8167,7 +8135,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -8175,7 +8142,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -8238,7 +8205,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -8300,11 +8266,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -8387,7 +8352,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -8470,9 +8434,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -8587,7 +8551,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -8622,7 +8588,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -8709,7 +8674,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -8779,7 +8743,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -8824,7 +8788,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -8901,7 +8865,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -8910,36 +8873,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! 
version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -9021,7 +8982,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -9125,8 +9085,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ @@ -9209,14 +9174,13 @@ ]), 'definitions': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -9315,8 +9279,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -9434,7 +9397,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -9508,7 +9470,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -9582,7 +9543,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -9590,7 +9550,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. 
Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -9653,7 +9613,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -9715,11 +9674,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -9802,7 +9760,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -9885,9 +9842,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -10002,7 +9959,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -10018,7 +9977,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -10105,7 +10063,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -10175,7 +10132,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -10220,7 +10177,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -10297,7 +10254,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -10306,36 +10262,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! 
version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -10417,7 +10371,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -10521,8 +10474,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ @@ -10560,14 +10518,13 @@ dict({ 'definitions': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -10666,8 +10623,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -10785,7 +10741,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -10859,7 +10814,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -10996,7 +10950,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -11004,7 +10957,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. 
Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -11067,7 +11020,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -11129,11 +11081,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -11216,7 +11167,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -11299,9 +11249,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -11416,7 +11366,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -11462,7 +11414,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -11549,7 +11500,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -11619,7 +11569,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -11664,7 +11614,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -11741,7 +11691,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -11750,36 +11699,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! 
version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -11861,7 +11808,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -11965,8 +11911,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ @@ -12016,14 +11967,13 @@ ]), 'definitions': dict({ 'AIMessage': dict({ - 'additionalProperties': True, 'description': ''' Message from an AI. - AIMessage is returned from a chat model as a response to a prompt. + An `AIMessage` is returned from a chat model as a response to a prompt. This message represents the output of the model and consists of both - the raw output as returned by the model together standardized fields + the raw output as returned by the model and standardized fields (e.g., tool calls, usage metadata) added by the LangChain framework. ''', 'properties': dict({ @@ -12122,8 +12072,7 @@ 'type': 'object', }), 'AIMessageChunk': dict({ - 'additionalProperties': True, - 'description': 'Message chunk from an AI.', + 'description': 'Message chunk from an AI (yielded when streaming).', 'properties': dict({ 'additional_kwargs': dict({ 'title': 'Additional Kwargs', @@ -12241,7 +12190,6 @@ 'type': 'object', }), 'ChatMessage': dict({ - 'additionalProperties': True, 'description': 'Message that can be assigned an arbitrary speaker (i.e. role).', 'properties': dict({ 'additional_kwargs': dict({ @@ -12315,7 +12263,6 @@ 'type': 'object', }), 'ChatMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Chat Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -12452,7 +12399,6 @@ 'type': 'object', }), 'FunctionMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -12460,7 +12406,7 @@ do not contain the `tool_call_id` field. The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. 
Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -12523,7 +12469,6 @@ 'type': 'object', }), 'FunctionMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Function Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -12585,11 +12530,10 @@ 'type': 'object', }), 'HumanMessage': dict({ - 'additionalProperties': True, 'description': ''' - Message from a human. + Message from the user. - `HumanMessage`s are messages that are passed in from a human to the model. + A `HumanMessage` is a message that is passed in from a user to the model. Example: ```python @@ -12672,7 +12616,6 @@ 'type': 'object', }), 'HumanMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Human Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -12755,9 +12698,9 @@ } ``` - !!! version-added "Added in version 0.3.9" - May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -12872,7 +12815,9 @@ } ``` - !!! version-added "Added in version 0.3.9" + May also hold extra provider-specific keys. + + !!! version-added "Added in `langchain-core` 0.3.9" ''', 'properties': dict({ 'audio': dict({ @@ -12907,7 +12852,6 @@ 'type': 'object', }), 'SystemMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for priming AI behavior. @@ -12994,7 +12938,6 @@ 'type': 'object', }), 'SystemMessageChunk': dict({ - 'additionalProperties': True, 'description': 'System Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -13064,7 +13007,7 @@ }), 'ToolCall': dict({ 'description': ''' - Represents a request to call a tool. + Represents an AI's request to call a tool. Example: ```python @@ -13109,7 +13052,7 @@ }), 'ToolCallChunk': dict({ 'description': ''' - A chunk of a tool call (e.g., as part of a stream). + A chunk of a tool call (yielded when streaming). When merging `ToolCallChunk`s (e.g., via `AIMessageChunk.__add__`), all string attributes are concatenated. Chunks are only merged if their @@ -13186,7 +13129,6 @@ 'type': 'object', }), 'ToolMessage': dict({ - 'additionalProperties': True, 'description': ''' Message for passing the result of executing a tool back to a model. @@ -13195,36 +13137,34 @@ Example: A `ToolMessage` representing a result of `42` from a tool call with id - ```python - from langchain_core.messages import ToolMessage + ```python + from langchain_core.messages import ToolMessage - ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") - ``` + ToolMessage(content="42", tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL") + ``` Example: A `ToolMessage` where only part of the tool output is sent to the model - and the full output is passed in to artifact. + and the full output is passed in to artifact. - !!! 
version-added "Added in version 0.2.17" + ```python + from langchain_core.messages import ToolMessage - ```python - from langchain_core.messages import ToolMessage + tool_output = { + "stdout": "From the graph we can see that the correlation between " + "x and y is ...", + "stderr": None, + "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, + } - tool_output = { - "stdout": "From the graph we can see that the correlation between " - "x and y is ...", - "stderr": None, - "artifacts": {"type": "image", "base64_data": "/9j/4gIcSU..."}, - } - - ToolMessage( - content=tool_output["stdout"], - artifact=tool_output, - tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", - ) - ``` + ToolMessage( + content=tool_output["stdout"], + artifact=tool_output, + tool_call_id="call_Jja7J89XsjrOLA5r!MEOW!SL", + ) + ``` The `tool_call_id` field is used to associate the tool call request with the - tool call response. This is useful in situations where a chat model is able + tool call response. Useful in situations where a chat model is able to request multiple tool calls in parallel. ''', 'properties': dict({ @@ -13306,7 +13246,6 @@ 'type': 'object', }), 'ToolMessageChunk': dict({ - 'additionalProperties': True, 'description': 'Tool Message chunk.', 'properties': dict({ 'additional_kwargs': dict({ @@ -13410,8 +13349,13 @@ } ``` - !!! warning "Behavior changed in 0.3.9" + !!! warning "Behavior changed in `langchain-core` 0.3.9" Added `input_token_details` and `output_token_details`. + + !!! note "LangSmith SDK" + The LangSmith SDK also has a `UsageMetadata` class. While the two share fields, + LangSmith's `UsageMetadata` has additional fields to capture cost information + used by the LangSmith platform. ''', 'properties': dict({ 'input_token_details': dict({ diff --git a/libs/core/tests/unit_tests/runnables/test_fallbacks.py b/libs/core/tests/unit_tests/runnables/test_fallbacks.py index 0a6b82540e2..1d10887c725 100644 --- a/libs/core/tests/unit_tests/runnables/test_fallbacks.py +++ b/libs/core/tests/unit_tests/runnables/test_fallbacks.py @@ -266,7 +266,7 @@ def _error(msg: str) -> None: def _generate_immediate_error(_: Iterator) -> Iterator[str]: - _error("immmediate error") + _error("immediate error") yield "" diff --git a/libs/core/tests/unit_tests/runnables/test_history.py b/libs/core/tests/unit_tests/runnables/test_history.py index 9ede918fa2d..5a1c3f827aa 100644 --- a/libs/core/tests/unit_tests/runnables/test_history.py +++ b/libs/core/tests/unit_tests/runnables/test_history.py @@ -3,7 +3,6 @@ from collections.abc import Callable, Sequence from typing import Any import pytest -from packaging import version from pydantic import BaseModel, RootModel from typing_extensions import override @@ -25,7 +24,6 @@ from langchain_core.tracers.root_listeners import ( AsyncRootListenersTracer, RootListenersTracer, ) -from langchain_core.utils.pydantic import PYDANTIC_VERSION from tests.unit_tests.pydantic_utils import _schema @@ -497,8 +495,6 @@ def test_get_output_schema() -> None: "title": "RunnableWithChatHistoryOutput", "type": "object", } - if version.parse("2.11") <= PYDANTIC_VERSION: - expected_schema["additionalProperties"] = True assert _schema(output_type) == expected_schema diff --git a/libs/core/tests/unit_tests/runnables/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py index c8afa1eea5d..4e82358d9bf 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable.py @@ -94,7 +94,7 @@ PYDANTIC_VERSION_AT_LEAST_210 = 
version.parse("2.10") <= PYDANTIC_VERSION class FakeTracer(BaseTracer): """Fake tracer that records LangChain execution. - It replaces run ids with deterministic UUIDs for snapshotting. + It replaces run IDs with deterministic UUIDs for snapshotting. """ def __init__(self) -> None: @@ -313,6 +313,12 @@ def test_schemas(snapshot: SnapshotAssertion) -> None: "description": "Class for storing a piece of text and " "associated metadata.\n" "\n" + "!!! note\n" + " `Document` is for **retrieval workflows**, not chat I/O. For " + "sending text\n" + " to an LLM in a conversation, use message types from " + "`langchain.messages`.\n" + "\n" "Example:\n" " ```python\n" " from langchain_core.documents import Document\n" @@ -2073,7 +2079,7 @@ async def test_prompt_with_llm( part async for part in chain.astream_log({"question": "What is your name?"}) ] - # remove ids from logs + # Remove IDs from logs for part in stream_log: for op in part.ops: if ( @@ -2284,7 +2290,7 @@ async def test_prompt_with_llm_parser( part async for part in chain.astream_log({"question": "What is your name?"}) ] - # remove ids from logs + # Remove IDs from logs for part in stream_log: for op in part.ops: if ( @@ -2472,7 +2478,7 @@ async def test_stream_log_retriever() -> None: part async for part in chain.astream_log({"question": "What is your name?"}) ] - # remove ids from logs + # Remove IDs from logs for part in stream_log: for op in part.ops: if ( @@ -2505,7 +2511,7 @@ async def test_stream_log_lists() -> None: part async for part in chain.astream_log({"question": "What is your name?"}) ] - # remove ids from logs + # Remove IDs from logs for part in stream_log: for op in part.ops: if ( @@ -5719,3 +5725,37 @@ def test_runnable_assign() -> None: result = runnable_assign.invoke({"input": 5}) assert result == {"input": 5, "add_step": {"added": 15}} + + +def test_runnable_typed_dict_schema() -> None: + """Testing that the schema is generated properly(not empty) when using TypedDict. + + subclasses to annotate the arguments of a RunnableParallel children. + """ + + class Foo(TypedDict): + foo: str + + class InputData(Foo): + bar: str + + def forward_foo(input_data: InputData) -> str: + return input_data["foo"] + + def transform_input(input_data: InputData) -> dict[str, str]: + foo = input_data["foo"] + bar = input_data["bar"] + + return {"transformed": foo + bar} + + foo_runnable = RunnableLambda(forward_foo) + other_runnable = RunnableLambda(transform_input) + + parallel = RunnableParallel( + foo=foo_runnable, + other=other_runnable, + ) + assert ( + repr(parallel.input_schema.model_validate({"foo": "Y", "bar": "Z"})) + == "RunnableParallelInput(root={'foo': 'Y', 'bar': 'Z'})" + ) diff --git a/libs/core/tests/unit_tests/runnables/test_runnable_events_v1.py b/libs/core/tests/unit_tests/runnables/test_runnable_events_v1.py index 8efd5aea9a6..0b30aa58be5 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable_events_v1.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable_events_v1.py @@ -36,10 +36,10 @@ from tests.unit_tests.stubs import _any_id_ai_message, _any_id_ai_message_chunk def _with_nulled_run_id(events: Sequence[StreamEvent]) -> list[StreamEvent]: - """Removes the run ids from events.""" + """Removes the run IDs from events.""" for event in events: - assert "parent_ids" in event, "Parent ids should be present in the event." - assert event["parent_ids"] == [], "Parent ids should be empty." + assert "parent_ids" in event, "Parent IDs should be present in the event." 
+ assert event["parent_ids"] == [], "Parent IDs should be empty." return cast("list[StreamEvent]", [{**event, "run_id": ""} for event in events]) diff --git a/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py b/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py index de6c4030cac..42e95d6b160 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable_events_v2.py @@ -56,7 +56,7 @@ from tests.unit_tests.stubs import _any_id_ai_message, _any_id_ai_message_chunk def _with_nulled_run_id(events: Sequence[StreamEvent]) -> list[StreamEvent]: - """Removes the run ids from events.""" + """Removes the run IDs from events.""" for event in events: assert "run_id" in event, f"Event {event} does not have a run_id." assert "parent_ids" in event, f"Event {event} does not have parent_ids." diff --git a/libs/core/tests/unit_tests/test_tools.py b/libs/core/tests/unit_tests/test_tools.py index adc737e8fce..6c7dc25bba7 100644 --- a/libs/core/tests/unit_tests/test_tools.py +++ b/libs/core/tests/unit_tests/test_tools.py @@ -68,7 +68,14 @@ from langchain_core.utils.pydantic import ( create_model_v2, ) from tests.unit_tests.fake.callbacks import FakeCallbackHandler -from tests.unit_tests.pydantic_utils import _schema +from tests.unit_tests.pydantic_utils import _normalize_schema, _schema + +try: + from langgraph.prebuilt import ToolRuntime # type: ignore[import-not-found] + + HAS_LANGGRAPH = True +except ImportError: + HAS_LANGGRAPH = False def _get_tool_call_json_schema(tool: BaseTool) -> dict: @@ -105,14 +112,6 @@ class _MockSchema(BaseModel): arg3: dict | None = None -class _MockSchemaV1(BaseModelV1): - """Return the arguments directly.""" - - arg1: int - arg2: bool - arg3: dict | None = None - - class _MockStructuredTool(BaseTool): name: str = "structured_api" args_schema: type[BaseModel] = _MockSchema @@ -206,6 +205,21 @@ def test_decorator_with_specified_schema() -> None: assert isinstance(tool_func, BaseTool) assert tool_func.args_schema == _MockSchema + +@pytest.mark.skipif( + sys.version_info >= (3, 14), + reason="pydantic.v1 namespace not supported with Python 3.14+", +) +def test_decorator_with_specified_schema_pydantic_v1() -> None: + """Test that manually specified schemata are passed through to the tool.""" + + class _MockSchemaV1(BaseModelV1): + """Return the arguments directly.""" + + arg1: int + arg2: bool + arg3: dict | None = None + @tool(args_schema=cast("ArgsSchema", _MockSchemaV1)) def tool_func_v1(*, arg1: int, arg2: bool, arg3: dict | None = None) -> str: return f"{arg1} {arg2} {arg3}" @@ -229,7 +243,7 @@ def test_decorated_function_schema_equivalent() -> None: assert ( _schema(structured_tool_input.args_schema)["properties"] == _schema(_MockSchema)["properties"] - == structured_tool_input.args + == _normalize_schema(structured_tool_input.args) ) @@ -348,6 +362,10 @@ def test_structured_tool_types_parsed() -> None: assert result == expected +@pytest.mark.skipif( + sys.version_info >= (3, 14), + reason="pydantic.v1 namespace not supported with Python 3.14+", +) def test_structured_tool_types_parsed_pydantic_v1() -> None: """Test the non-primitive types are correctly passed to structured tools.""" @@ -1287,7 +1305,7 @@ def test_docstring_parsing() -> None: assert args_schema2["description"] == "The foo. Additional description here." 
assert args_schema2["properties"] == expected["properties"] - # Multi-line wth Returns block + # Multi-line with Returns block def foo3(bar: str, baz: int) -> str: """The foo. @@ -1880,7 +1898,10 @@ def generate_backwards_compatible_v1() -> list[Any]: # behave well with either pydantic 1 proper, # pydantic v1 from pydantic 2, # or pydantic 2 proper. -TEST_MODELS = generate_models() + generate_backwards_compatible_v1() +TEST_MODELS = generate_models() + +if sys.version_info < (3, 14): + TEST_MODELS += generate_backwards_compatible_v1() @pytest.mark.parametrize("pydantic_model", TEST_MODELS) @@ -1934,11 +1955,10 @@ def test_args_schema_as_pydantic(pydantic_model: Any) -> None: def test_args_schema_explicitly_typed() -> None: - """This should test that one can type the args schema as a pydantic model. - - Please note that this will test using pydantic 2 even though BaseTool - is a pydantic 1 model! + """This should test that one can type the args schema as a Pydantic model. + Please note that this will test using pydantic 2 even though `BaseTool` + is a Pydantic 1 model! """ class Foo(BaseModel): @@ -1981,7 +2001,7 @@ def test_args_schema_explicitly_typed() -> None: @pytest.mark.parametrize("pydantic_model", TEST_MODELS) def test_structured_tool_with_different_pydantic_versions(pydantic_model: Any) -> None: - """This should test that one can type the args schema as a pydantic model.""" + """This should test that one can type the args schema as a Pydantic model.""" def foo(a: int, b: str) -> str: """Hahaha.""" @@ -2080,6 +2100,8 @@ def test__get_all_basemodel_annotations_v2(*, use_v1_namespace: bool) -> None: A = TypeVar("A") if use_v1_namespace: + if sys.version_info >= (3, 14): + pytest.skip("pydantic.v1 namespace not supported with Python 3.14+") class ModelA(BaseModelV1, Generic[A], extra="allow"): a: A @@ -2758,3 +2780,249 @@ def test_tool_args_schema_with_annotated_type() -> None: "type": "array", } } + + +class CallbackHandlerWithInputCapture(FakeCallbackHandler): + """Callback handler that captures inputs passed to on_tool_start.""" + + captured_inputs: list[dict | None] = [] + + def on_tool_start( + self, + serialized: dict[str, Any], + input_str: str, + *, + run_id: Any, + parent_run_id: Any | None = None, + tags: list[str] | None = None, + metadata: dict[str, Any] | None = None, + inputs: dict[str, Any] | None = None, + **kwargs: Any, + ) -> Any: + """Capture the inputs passed to on_tool_start.""" + self.captured_inputs.append(inputs) + return super().on_tool_start( + serialized, + input_str, + run_id=run_id, + parent_run_id=parent_run_id, + tags=tags, + metadata=metadata, + inputs=inputs, + **kwargs, + ) + + +def test_filter_injected_args_from_callbacks() -> None: + """Test that injected tool arguments are filtered from callback inputs.""" + + @tool + def search_tool( + query: str, + state: Annotated[dict, InjectedToolArg()], + ) -> str: + """Search with injected state. + + Args: + query: The search query. + state: Injected state context. 
+ """ + return f"Results for: {query}" + + handler = CallbackHandlerWithInputCapture(captured_inputs=[]) + result = search_tool.invoke( + {"query": "test query", "state": {"user_id": 123}}, + config={"callbacks": [handler]}, + ) + + assert result == "Results for: test query" + assert handler.tool_starts == 1 + assert len(handler.captured_inputs) == 1 + + # Verify that injected 'state' arg is filtered out + captured = handler.captured_inputs[0] + assert captured is not None + assert "query" in captured + assert "state" not in captured + assert captured["query"] == "test query" + + +def test_filter_run_manager_from_callbacks() -> None: + """Test that run_manager is filtered from callback inputs.""" + + @tool + def tool_with_run_manager( + message: str, + run_manager: CallbackManagerForToolRun | None = None, + ) -> str: + """Tool with run_manager parameter. + + Args: + message: The message to process. + run_manager: The callback manager. + """ + return f"Processed: {message}" + + handler = CallbackHandlerWithInputCapture(captured_inputs=[]) + result = tool_with_run_manager.invoke( + {"message": "hello"}, + config={"callbacks": [handler]}, + ) + + assert result == "Processed: hello" + assert handler.tool_starts == 1 + assert len(handler.captured_inputs) == 1 + + # Verify that run_manager is filtered out + captured = handler.captured_inputs[0] + assert captured is not None + assert "message" in captured + assert "run_manager" not in captured + + +def test_filter_multiple_injected_args() -> None: + """Test filtering multiple injected arguments from callback inputs.""" + + @tool + def complex_tool( + query: str, + limit: int, + state: Annotated[dict, InjectedToolArg()], + context: Annotated[str, InjectedToolArg()], + run_manager: CallbackManagerForToolRun | None = None, + ) -> str: + """Complex tool with multiple injected args. + + Args: + query: The search query. + limit: Maximum number of results. + state: Injected state. + context: Injected context. + run_manager: The callback manager. + """ + return f"Query: {query}, Limit: {limit}" + + handler = CallbackHandlerWithInputCapture(captured_inputs=[]) + result = complex_tool.invoke( + { + "query": "test", + "limit": 10, + "state": {"foo": "bar"}, + "context": "some context", + }, + config={"callbacks": [handler]}, + ) + + assert result == "Query: test, Limit: 10" + assert handler.tool_starts == 1 + assert len(handler.captured_inputs) == 1 + + # Verify that only non-injected args remain + captured = handler.captured_inputs[0] + assert captured is not None + assert captured == {"query": "test", "limit": 10} + assert "state" not in captured + assert "context" not in captured + assert "run_manager" not in captured + + +def test_no_filtering_for_string_input() -> None: + """Test that string inputs are not filtered (passed as None).""" + + @tool + def simple_tool(query: str) -> str: + """Simple tool with string input. + + Args: + query: The query string. 
+ """ + return f"Result: {query}" + + handler = CallbackHandlerWithInputCapture(captured_inputs=[]) + result = simple_tool.invoke("test query", config={"callbacks": [handler]}) + + assert result == "Result: test query" + assert handler.tool_starts == 1 + assert len(handler.captured_inputs) == 1 + + # String inputs should result in None for the inputs parameter + assert handler.captured_inputs[0] is None + + +async def test_filter_injected_args_async() -> None: + """Test that injected args are filtered in async tool execution.""" + + @tool + async def async_search_tool( + query: str, + state: Annotated[dict, InjectedToolArg()], + ) -> str: + """Async search with injected state. + + Args: + query: The search query. + state: Injected state context. + """ + return f"Async results for: {query}" + + handler = CallbackHandlerWithInputCapture(captured_inputs=[]) + result = await async_search_tool.ainvoke( + {"query": "async test", "state": {"user_id": 456}}, + config={"callbacks": [handler]}, + ) + + assert result == "Async results for: async test" + assert handler.tool_starts == 1 + assert len(handler.captured_inputs) == 1 + + # Verify filtering in async execution + captured = handler.captured_inputs[0] + assert captured is not None + assert "query" in captured + assert "state" not in captured + assert captured["query"] == "async test" + + +@pytest.mark.skipif(not HAS_LANGGRAPH, reason="langgraph not installed") +def test_filter_tool_runtime_directly_injected_arg() -> None: + """Test that ToolRuntime (a _DirectlyInjectedToolArg) is filtered.""" + + @tool + def tool_with_runtime(query: str, limit: int, runtime: ToolRuntime) -> str: + """Tool with ToolRuntime parameter. + + Args: + query: The search query. + limit: Max results. + runtime: The tool runtime (directly injected). + """ + return f"Query: {query}, Limit: {limit}" + + handler = CallbackHandlerWithInputCapture(captured_inputs=[]) + + # Create a mock ToolRuntime instance + class MockRuntime: + """Mock ToolRuntime for testing.""" + + agent_name = "test_agent" + context: dict[str, Any] = {} + state: dict[str, Any] = {} + + result = tool_with_runtime.invoke( + { + "query": "test", + "limit": 5, + "runtime": MockRuntime(), + }, + config={"callbacks": [handler]}, + ) + + assert result == "Query: test, Limit: 5" + assert handler.tool_starts == 1 + assert len(handler.captured_inputs) == 1 + + # Verify that ToolRuntime is filtered out + captured = handler.captured_inputs[0] + assert captured is not None + assert captured == {"query": "test", "limit": 5} + assert "runtime" not in captured diff --git a/libs/core/tests/unit_tests/tracers/test_memory_stream.py b/libs/core/tests/unit_tests/tracers/test_memory_stream.py index 284e7035299..da96e6d7bb3 100644 --- a/libs/core/tests/unit_tests/tracers/test_memory_stream.py +++ b/libs/core/tests/unit_tests/tracers/test_memory_stream.py @@ -120,7 +120,7 @@ def test_send_to_closed_stream() -> None: We may want to handle this in a better way in the future. 
""" - event_loop = asyncio.get_event_loop() + event_loop = asyncio.new_event_loop() channel = _MemoryStream[str](event_loop) writer = channel.get_send_stream() # send with an open even loop diff --git a/libs/core/tests/unit_tests/tracers/test_run_collector.py b/libs/core/tests/unit_tests/tracers/test_run_collector.py index 95c0052cf21..17f6a973fce 100644 --- a/libs/core/tests/unit_tests/tracers/test_run_collector.py +++ b/libs/core/tests/unit_tests/tracers/test_run_collector.py @@ -7,9 +7,9 @@ from langchain_core.tracers.context import collect_runs def test_collect_runs() -> None: - llm = FakeListLLM(responses=["hello"]) + model = FakeListLLM(responses=["hello"]) with collect_runs() as cb: - llm.invoke("hi") + model.invoke("hi") assert cb.traced_runs assert len(cb.traced_runs) == 1 assert isinstance(cb.traced_runs[0].id, uuid.UUID) diff --git a/libs/core/tests/unit_tests/tracers/test_schemas.py b/libs/core/tests/unit_tests/tracers/test_schemas.py index 4f86d0fe541..97f476059f5 100644 --- a/libs/core/tests/unit_tests/tracers/test_schemas.py +++ b/libs/core/tests/unit_tests/tracers/test_schemas.py @@ -5,17 +5,7 @@ from langchain_core.tracers.schemas import __all__ as schemas_all def test_public_api() -> None: """Test for changes in the public API.""" expected_all = [ - "BaseRun", - "ChainRun", - "LLMRun", "Run", - "RunTypeEnum", - "ToolRun", - "TracerSession", - "TracerSessionBase", - "TracerSessionV1", - "TracerSessionV1Base", - "TracerSessionV1Create", ] assert sorted(schemas_all) == expected_all diff --git a/libs/core/tests/unit_tests/utils/test_function_calling.py b/libs/core/tests/unit_tests/utils/test_function_calling.py index 3fdfd63e087..c4edce261be 100644 --- a/libs/core/tests/unit_tests/utils/test_function_calling.py +++ b/libs/core/tests/unit_tests/utils/test_function_calling.py @@ -1155,3 +1155,16 @@ def test_convert_to_openai_function_nested_strict_2() -> None: actual = convert_to_openai_function(my_function, strict=True) assert actual == expected + + +def test_convert_to_openai_function_strict_required() -> None: + class MyModel(BaseModel): + """Dummy schema.""" + + arg1: int = Field(..., description="foo") + arg2: str | None = Field(None, description="bar") + + expected = ["arg1", "arg2"] + func = convert_to_openai_function(MyModel, strict=True) + actual = func["parameters"]["required"] + assert actual == expected diff --git a/libs/core/tests/unit_tests/utils/test_pydantic.py b/libs/core/tests/unit_tests/utils/test_pydantic.py index 0793c752108..ef4775c0e41 100644 --- a/libs/core/tests/unit_tests/utils/test_pydantic.py +++ b/libs/core/tests/unit_tests/utils/test_pydantic.py @@ -1,8 +1,10 @@ """Test for some custom pydantic decorators.""" +import sys import warnings from typing import Any +import pytest from pydantic import BaseModel, ConfigDict, Field from pydantic.v1 import BaseModel as BaseModelV1 @@ -139,6 +141,10 @@ def test_fields_pydantic_v2_proper() -> None: assert fields == {"x": Foo.model_fields["x"]} +@pytest.mark.skipif( + sys.version_info >= (3, 14), + reason="pydantic.v1 namespace not supported with Python 3.14+", +) def test_fields_pydantic_v1_from_2() -> None: class Foo(BaseModelV1): x: int diff --git a/libs/core/tests/unit_tests/utils/test_strings.py b/libs/core/tests/unit_tests/utils/test_strings.py index 2cf8aa14b4c..9e94517cec9 100644 --- a/libs/core/tests/unit_tests/utils/test_strings.py +++ b/libs/core/tests/unit_tests/utils/test_strings.py @@ -62,7 +62,7 @@ def test_stringify_value_nested_structures() -> None: result = stringify_value(nested_data) - # 
Shoudl contain all the nested values + # Should contain all the nested values assert "users:" in result assert "name: Alice" in result assert "name: Bob" in result diff --git a/libs/core/tests/unit_tests/utils/test_utils.py b/libs/core/tests/unit_tests/utils/test_utils.py index 8cc1c559ecc..f70063c421e 100644 --- a/libs/core/tests/unit_tests/utils/test_utils.py +++ b/libs/core/tests/unit_tests/utils/test_utils.py @@ -1,5 +1,6 @@ import os import re +import sys from collections.abc import Callable from contextlib import AbstractContextManager, nullcontext from copy import deepcopy @@ -214,6 +215,10 @@ def test_guard_import_failure( guard_import(module_name, pip_name=pip_name, package=package) +@pytest.mark.skipif( + sys.version_info >= (3, 14), + reason="pydantic.v1 namespace not supported with Python 3.14+", +) def test_get_pydantic_field_names_v1_in_2() -> None: class PydanticV1Model(PydanticV1BaseModel): field1: str diff --git a/libs/core/tests/unit_tests/vectorstores/test_vectorstore.py b/libs/core/tests/unit_tests/vectorstores/test_vectorstore.py index e3904b7ad69..f8af00ee79b 100644 --- a/libs/core/tests/unit_tests/vectorstores/test_vectorstore.py +++ b/libs/core/tests/unit_tests/vectorstores/test_vectorstore.py @@ -21,7 +21,7 @@ if TYPE_CHECKING: class CustomAddTextsVectorstore(VectorStore): - """A vectorstore that only implements add texts.""" + """A VectorStore that only implements add texts.""" def __init__(self) -> None: self.store: dict[str, Document] = {} @@ -72,7 +72,7 @@ class CustomAddTextsVectorstore(VectorStore): class CustomAddDocumentsVectorstore(VectorStore): - """A vectorstore that only implements add documents.""" + """A VectorStore that only implements add documents.""" def __init__(self) -> None: self.store: dict[str, Document] = {} @@ -249,7 +249,7 @@ def test_default_from_documents(vs_class: type[VectorStore]) -> None: Document(id="1", page_content="hello", metadata={"foo": "bar"}) ] - # from_documents with ids in args + # from_documents with IDs in args store = vs_class.from_documents( [Document(page_content="hello", metadata={"foo": "bar"})], embeddings, ids=["1"] ) @@ -278,7 +278,7 @@ async def test_default_afrom_documents(vs_class: type[VectorStore]) -> None: Document(id="1", page_content="hello", metadata={"foo": "bar"}) ] - # from_documents with ids in args + # from_documents with IDs in args store = await vs_class.afrom_documents( [Document(page_content="hello", metadata={"foo": "bar"})], embeddings, ids=["1"] ) @@ -287,7 +287,7 @@ async def test_default_afrom_documents(vs_class: type[VectorStore]) -> None: Document(id="1", page_content="hello", metadata={"foo": "bar"}) ] - # Test afrom_documents with id specified in both document and ids + # Test afrom_documents with id specified in both document and IDs original_document = Document(id="7", page_content="baz") store = await vs_class.afrom_documents([original_document], embeddings, ids=["6"]) assert original_document.id == "7" # original document should not be modified diff --git a/libs/core/uv.lock b/libs/core/uv.lock index 85d27ad99bd..736a129bdd7 100644 --- a/libs/core/uv.lock +++ b/libs/core/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.10.0, <4.0.0" resolution-markers = [ "python_full_version >= '3.14' and platform_python_implementation == 'PyPy'", @@ -960,7 +960,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.3" source = { editable = "." 
} dependencies = [ { name = "jsonpatch" }, @@ -985,6 +985,7 @@ test = [ { name = "blockbuster" }, { name = "freezegun" }, { name = "grandalf" }, + { name = "langchain-model-profiles" }, { name = "langchain-tests" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, @@ -1000,6 +1001,7 @@ test = [ { name = "syrupy" }, ] typing = [ + { name = "langchain-model-profiles" }, { name = "langchain-text-splitters" }, { name = "mypy" }, { name = "types-pyyaml" }, @@ -1029,6 +1031,7 @@ test = [ { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" }, { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, { name = "grandalf", specifier = ">=0.8.0,<1.0.0" }, + { name = "langchain-model-profiles", directory = "../model-profiles" }, { name = "langchain-tests", directory = "../standard-tests" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, @@ -1045,15 +1048,56 @@ test = [ ] test-integration = [] typing = [ + { name = "langchain-model-profiles", directory = "../model-profiles" }, { name = "langchain-text-splitters", directory = "../text-splitters" }, { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" }, ] +[[package]] +name = "langchain-model-profiles" +version = "0.0.3" +source = { directory = "../model-profiles" } +dependencies = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] + +[package.metadata] +requires-dist = [ + { name = "tomli", marker = "python_full_version < '3.11'", specifier = ">=2.0.0,<3.0.0" }, + { name = "typing-extensions", specifier = ">=4.7.0,<5.0.0" }, +] + +[package.metadata.requires-dev] +dev = [{ name = "httpx", specifier = ">=0.23.0,<1" }] +lint = [ + { name = "langchain", editable = "../langchain_v1" }, + { name = "ruff", specifier = ">=0.12.2,<0.13.0" }, +] +test = [ + { name = "langchain", extras = ["openai"], editable = "../langchain_v1" }, + { name = "langchain-core", editable = "." }, + { name = "pytest", specifier = ">=8.0.0,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.2,<2.0.0" }, + { name = "pytest-cov", specifier = ">=4.0.0,<8.0.0" }, + { name = "pytest-mock" }, + { name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" }, + { name = "pytest-watcher", specifier = ">=0.2.6,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" }, + { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, + { name = "toml", specifier = ">=0.10.2,<1.0.0" }, +] +test-integration = [{ name = "langchain-core", editable = "." 
}] +typing = [ + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, + { name = "types-toml", specifier = ">=0.10.8.20240310,<1.0.0.0" }, +] + [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.1" source = { directory = "../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1098,7 +1142,7 @@ typing = [ [[package]] name = "langchain-text-splitters" -version = "1.0.0a1" +version = "1.0.0" source = { directory = "../text-splitters" } dependencies = [ { name = "langchain-core" }, @@ -1131,8 +1175,8 @@ test-integration = [ { name = "nltk", specifier = ">=3.9.1,<4.0.0" }, { name = "scipy", marker = "python_full_version == '3.12.*'", specifier = ">=1.7.0,<2.0.0" }, { name = "scipy", marker = "python_full_version >= '3.13'", specifier = ">=1.14.1,<2.0.0" }, - { name = "sentence-transformers", specifier = ">=3.0.1,<4.0.0" }, - { name = "spacy", specifier = ">=3.8.7,<4.0.0" }, + { name = "sentence-transformers", marker = "python_full_version < '3.14'", specifier = ">=3.0.1,<4.0.0" }, + { name = "spacy", marker = "python_full_version < '3.14'", specifier = ">=3.8.7,<4.0.0" }, { name = "thinc", specifier = ">=8.3.6,<9.0.0" }, { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" }, { name = "transformers", specifier = ">=4.51.3,<5.0.0" }, @@ -2021,7 +2065,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -2029,96 +2073,119 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563", size = 814760, upload-time = "2025-10-07T15:58:03.467Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f", size = 459730, upload-time = "2025-10-07T15:58:01.576Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870, upload-time = "2025-10-07T10:50:45.974Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size 
= 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2c/a5c4640dc7132540109f67fe83b566fbc7512ccf2a068cfa22a243df70c7/pydantic_core-2.41.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e63036298322e9aea1c8b7c0a6c1204d615dbf6ec0668ce5b83ff27f07404a61", size = 2113814, upload-time = "2025-10-06T21:09:50.892Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e7/a8694c3454a57842095d69c7a4ab3cf81c3c7b590f052738eabfdfc2e234/pydantic_core-2.41.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:241299ca91fc77ef64f11ed909d2d9220a01834e8e6f8de61275c4dd16b7c936", size = 1916660, upload-time = "2025-10-06T21:09:52.783Z" }, + { url = "https://files.pythonhosted.org/packages/9c/58/29f12e65b19c1877a0269eb4f23c5d2267eded6120a7d6762501ab843dc9/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab7e594a2a5c24ab8013a7dc8cfe5f2260e80e490685814122081705c2cf2b0", size = 1975071, upload-time = "2025-10-06T21:09:54.009Z" }, + { url = "https://files.pythonhosted.org/packages/98/26/4e677f2b7ec3fbdd10be6b586a82a814c8ebe3e474024c8df2d4260e564e/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b054ef1a78519cb934b58e9c90c09e93b837c935dcd907b891f2b265b129eb6e", size = 2067271, upload-time = "2025-10-06T21:09:55.175Z" }, + { url = "https://files.pythonhosted.org/packages/29/50/50614bd906089904d7ca1be3b9ecf08c00a327143d48f1decfdc21b3c302/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2ab7d10d0ab2ed6da54c757233eb0f48ebfb4f86e9b88ccecb3f92bbd61a538", size = 2253207, upload-time = "2025-10-06T21:09:56.709Z" }, + { url = "https://files.pythonhosted.org/packages/ea/58/b1e640b4ca559273cca7c28e0fe8891d5d8e9a600f5ab4882670ec107549/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2757606b7948bb853a27e4040820306eaa0ccb9e8f9f8a0fa40cb674e170f350", size = 2375052, upload-time = "2025-10-06T21:09:57.97Z" }, + { url = "https://files.pythonhosted.org/packages/53/25/cd47df3bfb24350e03835f0950288d1054f1cc9a8023401dabe6d4ff2834/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cec0e75eb61f606bad0a32f2be87507087514e26e8c73db6cbdb8371ccd27917", size = 2076834, upload-time = "2025-10-06T21:09:59.58Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b4/71b2c77e5df527fbbc1a03e72c3fd96c44cd10d4241a81befef8c12b9fc4/pydantic_core-2.41.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0234236514f44a5bf552105cfe2543a12f48203397d9d0f866affa569345a5b5", size = 2195374, upload-time = "2025-10-06T21:10:01.18Z" }, + { url = "https://files.pythonhosted.org/packages/aa/08/4b8a50733005865efde284fec45da75fe16a258f706e16323c5ace4004eb/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1b974e41adfbb4ebb0f65fc4ca951347b17463d60893ba7d5f7b9bb087c83897", size = 2156060, upload-time = "2025-10-06T21:10:02.74Z" }, + { url = "https://files.pythonhosted.org/packages/83/c3/1037cb603ef2130c210150a51b1710d86825b5c28df54a55750099f91196/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = 
"sha256:248dafb3204136113c383e91a4d815269f51562b6659b756cf3df14eefc7d0bb", size = 2331640, upload-time = "2025-10-06T21:10:04.39Z" }, + { url = "https://files.pythonhosted.org/packages/56/4c/52d111869610e6b1a46e1f1035abcdc94d0655587e39104433a290e9f377/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:678f9d76a91d6bcedd7568bbf6beb77ae8447f85d1aeebaab7e2f0829cfc3a13", size = 2329844, upload-time = "2025-10-06T21:10:05.68Z" }, + { url = "https://files.pythonhosted.org/packages/32/5d/4b435f0b52ab543967761aca66b84ad3f0026e491e57de47693d15d0a8db/pydantic_core-2.41.1-cp310-cp310-win32.whl", hash = "sha256:dff5bee1d21ee58277900692a641925d2dddfde65182c972569b1a276d2ac8fb", size = 1991289, upload-time = "2025-10-06T21:10:07.199Z" }, + { url = "https://files.pythonhosted.org/packages/88/52/31b4deafc1d3cb96d0e7c0af70f0dc05454982d135d07f5117e6336153e8/pydantic_core-2.41.1-cp310-cp310-win_amd64.whl", hash = "sha256:5042da12e5d97d215f91567110fdfa2e2595a25f17c19b9ff024f31c34f9b53e", size = 2027747, upload-time = "2025-10-06T21:10:08.503Z" }, + { url = "https://files.pythonhosted.org/packages/f6/a9/ec440f02e57beabdfd804725ef1e38ac1ba00c49854d298447562e119513/pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4f276a6134fe1fc1daa692642a3eaa2b7b858599c49a7610816388f5e37566a1", size = 2111456, upload-time = "2025-10-06T21:10:09.824Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f9/6bc15bacfd8dcfc073a1820a564516d9c12a435a9a332d4cbbfd48828ddd/pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07588570a805296ece009c59d9a679dc08fab72fb337365afb4f3a14cfbfc176", size = 1915012, upload-time = "2025-10-06T21:10:11.599Z" }, + { url = "https://files.pythonhosted.org/packages/38/8a/d9edcdcdfe80bade17bed424284427c08bea892aaec11438fa52eaeaf79c/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28527e4b53400cd60ffbd9812ccb2b5135d042129716d71afd7e45bf42b855c0", size = 1973762, upload-time = "2025-10-06T21:10:13.154Z" }, + { url = "https://files.pythonhosted.org/packages/d5/b3/ff225c6d49fba4279de04677c1c876fc3dc6562fd0c53e9bfd66f58c51a8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46a1c935c9228bad738c8a41de06478770927baedf581d172494ab36a6b96575", size = 2065386, upload-time = "2025-10-06T21:10:14.436Z" }, + { url = "https://files.pythonhosted.org/packages/47/ba/183e8c0be4321314af3fd1ae6bfc7eafdd7a49bdea5da81c56044a207316/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:447ddf56e2b7d28d200d3e9eafa936fe40485744b5a824b67039937580b3cb20", size = 2252317, upload-time = "2025-10-06T21:10:15.719Z" }, + { url = "https://files.pythonhosted.org/packages/57/c5/aab61e94fd02f45c65f1f8c9ec38bb3b33fbf001a1837c74870e97462572/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63892ead40c1160ac860b5debcc95c95c5a0035e543a8b5a4eac70dd22e995f4", size = 2373405, upload-time = "2025-10-06T21:10:17.017Z" }, + { url = "https://files.pythonhosted.org/packages/e5/4f/3aaa3bd1ea420a15acc42d7d3ccb3b0bbc5444ae2f9dbc1959f8173e16b8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4a9543ca355e6df8fbe9c83e9faab707701e9103ae857ecb40f1c0cf8b0e94d", size = 2073794, upload-time = "2025-10-06T21:10:18.383Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/bd/e3975cdebe03ec080ef881648de316c73f2a6be95c14fc4efb2f7bdd0d41/pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2611bdb694116c31e551ed82e20e39a90bea9b7ad9e54aaf2d045ad621aa7a1", size = 2194430, upload-time = "2025-10-06T21:10:19.638Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/6b7e7217f147d3b3105b57fb1caec3c4f667581affdfaab6d1d277e1f749/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fecc130893a9b5f7bfe230be1bb8c61fe66a19db8ab704f808cb25a82aad0bc9", size = 2154611, upload-time = "2025-10-06T21:10:21.28Z" }, + { url = "https://files.pythonhosted.org/packages/fe/7b/239c2fe76bd8b7eef9ae2140d737368a3c6fea4fd27f8f6b4cde6baa3ce9/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:1e2df5f8344c99b6ea5219f00fdc8950b8e6f2c422fbc1cc122ec8641fac85a1", size = 2329809, upload-time = "2025-10-06T21:10:22.678Z" }, + { url = "https://files.pythonhosted.org/packages/bd/2e/77a821a67ff0786f2f14856d6bd1348992f695ee90136a145d7a445c1ff6/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:35291331e9d8ed94c257bab6be1cb3a380b5eee570a2784bffc055e18040a2ea", size = 2327907, upload-time = "2025-10-06T21:10:24.447Z" }, + { url = "https://files.pythonhosted.org/packages/fd/9a/b54512bb9df7f64c586b369328c30481229b70ca6a5fcbb90b715e15facf/pydantic_core-2.41.1-cp311-cp311-win32.whl", hash = "sha256:2876a095292668d753f1a868c4a57c4ac9f6acbd8edda8debe4218d5848cf42f", size = 1989964, upload-time = "2025-10-06T21:10:25.676Z" }, + { url = "https://files.pythonhosted.org/packages/9d/72/63c9a4f1a5c950e65dd522d7dd67f167681f9d4f6ece3b80085a0329f08f/pydantic_core-2.41.1-cp311-cp311-win_amd64.whl", hash = "sha256:b92d6c628e9a338846a28dfe3fcdc1a3279388624597898b105e078cdfc59298", size = 2025158, upload-time = "2025-10-06T21:10:27.522Z" }, + { url = "https://files.pythonhosted.org/packages/d8/16/4e2706184209f61b50c231529257c12eb6bd9eb36e99ea1272e4815d2200/pydantic_core-2.41.1-cp311-cp311-win_arm64.whl", hash = "sha256:7d82ae99409eb69d507a89835488fb657faa03ff9968a9379567b0d2e2e56bc5", size = 1972297, upload-time = "2025-10-06T21:10:28.814Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300, upload-time = "2025-10-06T21:10:30.463Z" }, + { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179, upload-time = "2025-10-06T21:10:31.782Z" }, + { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225, upload-time = "2025-10-06T21:10:33.11Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741", size = 2053315, upload-time = "2025-10-06T21:10:34.87Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8", size = 2224298, upload-time = "2025-10-06T21:10:36.233Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51", size = 2351797, upload-time = "2025-10-06T21:10:37.601Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5", size = 2074921, upload-time = "2025-10-06T21:10:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115", size = 2187767, upload-time = "2025-10-06T21:10:40.651Z" }, + { url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d", size = 2136062, upload-time = "2025-10-06T21:10:42.09Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5", size = 2317819, upload-time = "2025-10-06T21:10:43.974Z" }, + { url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513", size = 2312267, upload-time = "2025-10-06T21:10:45.34Z" }, + { url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479", size = 1990927, upload-time = "2025-10-06T21:10:46.738Z" }, + { url = "https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50", size = 2034703, upload-time = "2025-10-06T21:10:48.524Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde", size = 1972719, upload-time = "2025-10-06T21:10:50.256Z" }, + { url = "https://files.pythonhosted.org/packages/27/8a/6d54198536a90a37807d31a156642aae7a8e1263ed9fe6fc6245defe9332/pydantic_core-2.41.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70e790fce5f05204ef4403159857bfcd587779da78627b0babb3654f75361ebf", size = 2105825, upload-time = "2025-10-06T21:10:51.719Z" }, + { 
url = "https://files.pythonhosted.org/packages/4f/2e/4784fd7b22ac9c8439db25bf98ffed6853d01e7e560a346e8af821776ccc/pydantic_core-2.41.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9cebf1ca35f10930612d60bd0f78adfacee824c30a880e3534ba02c207cceceb", size = 1910126, upload-time = "2025-10-06T21:10:53.145Z" }, + { url = "https://files.pythonhosted.org/packages/f3/92/31eb0748059ba5bd0aa708fb4bab9fcb211461ddcf9e90702a6542f22d0d/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:170406a37a5bc82c22c3274616bf6f17cc7df9c4a0a0a50449e559cb755db669", size = 1961472, upload-time = "2025-10-06T21:10:55.754Z" }, + { url = "https://files.pythonhosted.org/packages/ab/91/946527792275b5c4c7dde4cfa3e81241bf6900e9fee74fb1ba43e0c0f1ab/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12d4257fc9187a0ccd41b8b327d6a4e57281ab75e11dda66a9148ef2e1fb712f", size = 2063230, upload-time = "2025-10-06T21:10:57.179Z" }, + { url = "https://files.pythonhosted.org/packages/31/5d/a35c5d7b414e5c0749f1d9f0d159ee2ef4bab313f499692896b918014ee3/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a75a33b4db105dd1c8d57839e17ee12db8d5ad18209e792fa325dbb4baeb00f4", size = 2229469, upload-time = "2025-10-06T21:10:59.409Z" }, + { url = "https://files.pythonhosted.org/packages/21/4d/8713737c689afa57ecfefe38db78259d4484c97aa494979e6a9d19662584/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a589f850803a74e0fcb16a72081cafb0d72a3cdda500106942b07e76b7bf62", size = 2347986, upload-time = "2025-10-06T21:11:00.847Z" }, + { url = "https://files.pythonhosted.org/packages/f6/ec/929f9a3a5ed5cda767081494bacd32f783e707a690ce6eeb5e0730ec4986/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97939d6ea44763c456bd8a617ceada2c9b96bb5b8ab3dfa0d0827df7619014", size = 2072216, upload-time = "2025-10-06T21:11:02.43Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/a33f459d4f9cc8786d9db42795dbecc84fa724b290d7d71ddc3d7155d46a/pydantic_core-2.41.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae423c65c556f09569524b80ffd11babff61f33055ef9773d7c9fabc11ed8d", size = 2193047, upload-time = "2025-10-06T21:11:03.787Z" }, + { url = "https://files.pythonhosted.org/packages/77/af/d5c6959f8b089f2185760a2779079e3c2c411bfc70ea6111f58367851629/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:4dc703015fbf8764d6a8001c327a87f1823b7328d40b47ce6000c65918ad2b4f", size = 2140613, upload-time = "2025-10-06T21:11:05.607Z" }, + { url = "https://files.pythonhosted.org/packages/58/e5/2c19bd2a14bffe7fabcf00efbfbd3ac430aaec5271b504a938ff019ac7be/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:968e4ffdfd35698a5fe659e5e44c508b53664870a8e61c8f9d24d3d145d30257", size = 2327641, upload-time = "2025-10-06T21:11:07.143Z" }, + { url = "https://files.pythonhosted.org/packages/93/ef/e0870ccda798c54e6b100aff3c4d49df5458fd64217e860cb9c3b0a403f4/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:fff2b76c8e172d34771cd4d4f0ade08072385310f214f823b5a6ad4006890d32", size = 2318229, upload-time = "2025-10-06T21:11:08.73Z" }, + { url = "https://files.pythonhosted.org/packages/b1/4b/c3b991d95f5deb24d0bd52e47bcf716098fa1afe0ce2d4bd3125b38566ba/pydantic_core-2.41.1-cp313-cp313-win32.whl", hash = 
"sha256:a38a5263185407ceb599f2f035faf4589d57e73c7146d64f10577f6449e8171d", size = 1997911, upload-time = "2025-10-06T21:11:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ce/5c316fd62e01f8d6be1b7ee6b54273214e871772997dc2c95e204997a055/pydantic_core-2.41.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42ae7fd6760782c975897e1fdc810f483b021b32245b0105d40f6e7a3803e4b", size = 2034301, upload-time = "2025-10-06T21:11:12.113Z" }, + { url = "https://files.pythonhosted.org/packages/29/41/902640cfd6a6523194123e2c3373c60f19006447f2fb06f76de4e8466c5b/pydantic_core-2.41.1-cp313-cp313-win_arm64.whl", hash = "sha256:ad4111acc63b7384e205c27a2f15e23ac0ee21a9d77ad6f2e9cb516ec90965fb", size = 1977238, upload-time = "2025-10-06T21:11:14.1Z" }, + { url = "https://files.pythonhosted.org/packages/04/04/28b040e88c1b89d851278478842f0bdf39c7a05da9e850333c6c8cbe7dfa/pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:440d0df7415b50084a4ba9d870480c16c5f67c0d1d4d5119e3f70925533a0edc", size = 1875626, upload-time = "2025-10-06T21:11:15.69Z" }, + { url = "https://files.pythonhosted.org/packages/d6/58/b41dd3087505220bb58bc81be8c3e8cbc037f5710cd3c838f44f90bdd704/pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71eaa38d342099405dae6484216dcf1e8e4b0bebd9b44a4e08c9b43db6a2ab67", size = 2045708, upload-time = "2025-10-06T21:11:17.258Z" }, + { url = "https://files.pythonhosted.org/packages/d7/b8/760f23754e40bf6c65b94a69b22c394c24058a0ef7e2aa471d2e39219c1a/pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl", hash = "sha256:555ecf7e50f1161d3f693bc49f23c82cf6cdeafc71fa37a06120772a09a38795", size = 1997171, upload-time = "2025-10-06T21:11:18.822Z" }, + { url = "https://files.pythonhosted.org/packages/41/12/cec246429ddfa2778d2d6301eca5362194dc8749ecb19e621f2f65b5090f/pydantic_core-2.41.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:05226894a26f6f27e1deb735d7308f74ef5fa3a6de3e0135bb66cdcaee88f64b", size = 2107836, upload-time = "2025-10-06T21:11:20.432Z" }, + { url = "https://files.pythonhosted.org/packages/20/39/baba47f8d8b87081302498e610aefc37142ce6a1cc98b2ab6b931a162562/pydantic_core-2.41.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:85ff7911c6c3e2fd8d3779c50925f6406d770ea58ea6dde9c230d35b52b16b4a", size = 1904449, upload-time = "2025-10-06T21:11:22.185Z" }, + { url = "https://files.pythonhosted.org/packages/50/32/9a3d87cae2c75a5178334b10358d631bd094b916a00a5993382222dbfd92/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47f1f642a205687d59b52dc1a9a607f45e588f5a2e9eeae05edd80c7a8c47674", size = 1961750, upload-time = "2025-10-06T21:11:24.348Z" }, + { url = "https://files.pythonhosted.org/packages/27/42/a96c9d793a04cf2a9773bff98003bb154087b94f5530a2ce6063ecfec583/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df11c24e138876ace5ec6043e5cae925e34cf38af1a1b3d63589e8f7b5f5cdc4", size = 2063305, upload-time = "2025-10-06T21:11:26.556Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8d/028c4b7d157a005b1f52c086e2d4b0067886b213c86220c1153398dbdf8f/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f0bf7f5c8f7bf345c527e8a0d72d6b26eda99c1227b0c34e7e59e181260de31", size = 2228959, upload-time = "2025-10-06T21:11:28.426Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/f7/ee64cda8fcc9ca3f4716e6357144f9ee71166775df582a1b6b738bf6da57/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82b887a711d341c2c47352375d73b029418f55b20bd7815446d175a70effa706", size = 2345421, upload-time = "2025-10-06T21:11:30.226Z" }, + { url = "https://files.pythonhosted.org/packages/13/c0/e8ec05f0f5ee7a3656973ad9cd3bc73204af99f6512c1a4562f6fb4b3f7d/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5f1d5d6bbba484bdf220c72d8ecd0be460f4bd4c5e534a541bb2cd57589fb8b", size = 2065288, upload-time = "2025-10-06T21:11:32.019Z" }, + { url = "https://files.pythonhosted.org/packages/0a/25/d77a73ff24e2e4fcea64472f5e39b0402d836da9b08b5361a734d0153023/pydantic_core-2.41.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bf1917385ebe0f968dc5c6ab1375886d56992b93ddfe6bf52bff575d03662be", size = 2189759, upload-time = "2025-10-06T21:11:33.753Z" }, + { url = "https://files.pythonhosted.org/packages/66/45/4a4ebaaae12a740552278d06fe71418c0f2869537a369a89c0e6723b341d/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4f94f3ab188f44b9a73f7295663f3ecb8f2e2dd03a69c8f2ead50d37785ecb04", size = 2140747, upload-time = "2025-10-06T21:11:35.781Z" }, + { url = "https://files.pythonhosted.org/packages/da/6d/b727ce1022f143194a36593243ff244ed5a1eb3c9122296bf7e716aa37ba/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:3925446673641d37c30bd84a9d597e49f72eacee8b43322c8999fa17d5ae5bc4", size = 2327416, upload-time = "2025-10-06T21:11:37.75Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8c/02df9d8506c427787059f87c6c7253435c6895e12472a652d9616ee0fc95/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:49bd51cc27adb980c7b97357ae036ce9b3c4d0bb406e84fbe16fb2d368b602a8", size = 2318138, upload-time = "2025-10-06T21:11:39.463Z" }, + { url = "https://files.pythonhosted.org/packages/98/67/0cf429a7d6802536941f430e6e3243f6d4b68f41eeea4b242372f1901794/pydantic_core-2.41.1-cp314-cp314-win32.whl", hash = "sha256:a31ca0cd0e4d12ea0df0077df2d487fc3eb9d7f96bbb13c3c5b88dcc21d05159", size = 1998429, upload-time = "2025-10-06T21:11:41.989Z" }, + { url = "https://files.pythonhosted.org/packages/38/60/742fef93de5d085022d2302a6317a2b34dbfe15258e9396a535c8a100ae7/pydantic_core-2.41.1-cp314-cp314-win_amd64.whl", hash = "sha256:1b5c4374a152e10a22175d7790e644fbd8ff58418890e07e2073ff9d4414efae", size = 2028870, upload-time = "2025-10-06T21:11:43.66Z" }, + { url = "https://files.pythonhosted.org/packages/31/38/cdd8ccb8555ef7720bd7715899bd6cfbe3c29198332710e1b61b8f5dd8b8/pydantic_core-2.41.1-cp314-cp314-win_arm64.whl", hash = "sha256:4fee76d757639b493eb600fba668f1e17475af34c17dd61db7a47e824d464ca9", size = 1974275, upload-time = "2025-10-06T21:11:45.476Z" }, + { url = "https://files.pythonhosted.org/packages/e7/7e/8ac10ccb047dc0221aa2530ec3c7c05ab4656d4d4bd984ee85da7f3d5525/pydantic_core-2.41.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f9b9c968cfe5cd576fdd7361f47f27adeb120517e637d1b189eea1c3ece573f4", size = 1875124, upload-time = "2025-10-06T21:11:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e4/7d9791efeb9c7d97e7268f8d20e0da24d03438a7fa7163ab58f1073ba968/pydantic_core-2.41.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ebc7ab67b856384aba09ed74e3e977dded40e693de18a4f197c67d0d4e6d8e", size = 2043075, upload-time = "2025-10-06T21:11:49.542Z" }, + { 
url = "https://files.pythonhosted.org/packages/2d/c3/3f6e6b2342ac11ac8cd5cb56e24c7b14afa27c010e82a765ffa5f771884a/pydantic_core-2.41.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8ae0dc57b62a762985bc7fbf636be3412394acc0ddb4ade07fe104230f1b9762", size = 1995341, upload-time = "2025-10-06T21:11:51.497Z" }, + { url = "https://files.pythonhosted.org/packages/16/89/d0afad37ba25f5801735af1472e650b86baad9fe807a42076508e4824a2a/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:68f2251559b8efa99041bb63571ec7cdd2d715ba74cc82b3bc9eff824ebc8bf0", size = 2124001, upload-time = "2025-10-07T10:49:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/8e/c4/08609134b34520568ddebb084d9ed0a2a3f5f52b45739e6e22cb3a7112eb/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:c7bc140c596097cb53b30546ca257dbe3f19282283190b1b5142928e5d5d3a20", size = 1941841, upload-time = "2025-10-07T10:49:56.248Z" }, + { url = "https://files.pythonhosted.org/packages/2a/43/94a4877094e5fe19a3f37e7e817772263e2c573c94f1e3fa2b1eee56ef3b/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2896510fce8f4725ec518f8b9d7f015a00db249d2fd40788f442af303480063d", size = 1961129, upload-time = "2025-10-07T10:49:58.298Z" }, + { url = "https://files.pythonhosted.org/packages/a2/30/23a224d7e25260eb5f69783a63667453037e07eb91ff0e62dabaadd47128/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ced20e62cfa0f496ba68fa5d6c7ee71114ea67e2a5da3114d6450d7f4683572a", size = 2148770, upload-time = "2025-10-07T10:49:59.959Z" }, + { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344, upload-time = "2025-10-07T10:50:02.466Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994, upload-time = "2025-10-07T10:50:04.379Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394, upload-time = "2025-10-07T10:50:06.335Z" }, + { url = "https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365, upload-time = "2025-10-07T10:50:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/f403d7ca8352e3e4df352ccacd200f5f7f7fe81cef8e458515f015091625/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fabcbdb12de6eada8d6e9a759097adb3c15440fafc675b3e94ae5c9cb8d678a0", size = 2114268, upload-time = "2025-10-07T10:50:10.257Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/b5/334473b6d2810df84db67f03d4f666acacfc538512c2d2a254074fee0889/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e97ccfaf0aaf67d55de5085b0ed0d994f57747d9d03f2de5cc9847ca737b08", size = 1935786, upload-time = "2025-10-07T10:50:12.333Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5e/45513e4dc621f47397cfa5fef12ba8fa5e8b1c4c07f2ff2a5fef8ff81b25/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34df1fe8fea5d332484a763702e8b6a54048a9d4fe6ccf41e34a128238e01f52", size = 1971995, upload-time = "2025-10-07T10:50:14.071Z" }, + { url = "https://files.pythonhosted.org/packages/22/e3/f1797c168e5f52b973bed1c585e99827a22d5e579d1ed57d51bc15b14633/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:421b5595f845842fc093f7250e24ee395f54ca62d494fdde96f43ecf9228ae01", size = 2191264, upload-time = "2025-10-07T10:50:15.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/e1/24ef4c3b4ab91c21c3a09a966c7d2cffe101058a7bfe5cc8b2c7c7d574e2/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dce8b22663c134583aaad24827863306a933f576c79da450be3984924e2031d1", size = 2152430, upload-time = "2025-10-07T10:50:18.018Z" }, + { url = "https://files.pythonhosted.org/packages/35/74/70c1e225d67f7ef3fdba02c506d9011efaf734020914920b2aa3d1a45e61/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:300a9c162fea9906cc5c103893ca2602afd84f0ec90d3be36f4cc360125d22e1", size = 2324691, upload-time = "2025-10-07T10:50:19.801Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bf/dd4d21037c8bef0d8cce90a86a3f2dcb011c30086db2a10113c3eea23eba/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e019167628f6e6161ae7ab9fb70f6d076a0bf0d55aa9b20833f86a320c70dd65", size = 2324493, upload-time = "2025-10-07T10:50:21.568Z" }, + { url = "https://files.pythonhosted.org/packages/7e/78/3093b334e9c9796c8236a4701cd2ddef1c56fb0928fe282a10c797644380/pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:13ab9cc2de6f9d4ab645a050ae5aee61a2424ac4d3a16ba23d4c2027705e0301", size = 2146156, upload-time = "2025-10-07T10:50:23.475Z" }, + { url = "https://files.pythonhosted.org/packages/e6/6c/fa3e45c2b054a1e627a89a364917f12cbe3abc3e91b9004edaae16e7b3c5/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:af2385d3f98243fb733862f806c5bb9122e5fba05b373e3af40e3c82d711cef1", size = 2112094, upload-time = "2025-10-07T10:50:25.513Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/7eebc38b4658cc8e6902d0befc26388e4c2a5f2e179c561eeb43e1922c7b/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6550617a0c2115be56f90c31a5370261d8ce9dbf051c3ed53b51172dd34da696", size = 1935300, upload-time = "2025-10-07T10:50:27.715Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/9fe640194a1717a464ab861d43595c268830f98cb1e2705aa134b3544b70/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc17b6ecf4983d298686014c92ebc955a9f9baf9f57dad4065e7906e7bee6222", size = 1970417, upload-time = "2025-10-07T10:50:29.573Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ad/f4cdfaf483b78ee65362363e73b6b40c48e067078d7b146e8816d5945ad6/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:42ae9352cf211f08b04ea110563d6b1e415878eea5b4c70f6bdb17dca3b932d2", size = 2190745, upload-time = "2025-10-07T10:50:31.48Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c1/18f416d40a10f44e9387497ba449f40fdb1478c61ba05c4b6bdb82300362/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e82947de92068b0a21681a13dd2102387197092fbe7defcfb8453e0913866506", size = 2150888, upload-time = "2025-10-07T10:50:33.477Z" }, + { url = "https://files.pythonhosted.org/packages/42/30/134c8a921630d8a88d6f905a562495a6421e959a23c19b0f49b660801d67/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e244c37d5471c9acdcd282890c6c4c83747b77238bfa19429b8473586c907656", size = 2324489, upload-time = "2025-10-07T10:50:36.48Z" }, + { url = "https://files.pythonhosted.org/packages/9c/48/a9263aeaebdec81e941198525b43edb3b44f27cfa4cb8005b8d3eb8dec72/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1e798b4b304a995110d41ec93653e57975620ccb2842ba9420037985e7d7284e", size = 2322763, upload-time = "2025-10-07T10:50:38.751Z" }, + { url = "https://files.pythonhosted.org/packages/1d/62/755d2bd2593f701c5839fc084e9c2c5e2418f460383ad04e3b5d0befc3ca/pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f1fc716c0eb1663c59699b024428ad5ec2bcc6b928527b8fe28de6cb89f47efb", size = 2144046, upload-time = "2025-10-07T10:50:40.686Z" }, ] [[package]] diff --git a/libs/langchain/README.md b/libs/langchain/README.md index 90d6129b784..32c6b513f75 100644 --- a/libs/langchain/README.md +++ b/libs/langchain/README.md @@ -1,9 +1,8 @@ -# πŸ¦œοΈπŸ”— LangChain +# πŸ¦œοΈπŸ”— LangChain Classic -⚑ Building applications with LLMs through composability ⚑ - -[![PyPI - License](https://img.shields.io/pypi/l/langchain?style=flat-square)](https://opensource.org/licenses/MIT) -[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain)](https://pypistats.org/packages/langchain) +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-classic?label=%20)](https://pypi.org/project/langchain-classic/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-classic)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-classic)](https://pypistats.org/packages/langchain-classic) [![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). @@ -13,67 +12,26 @@ To help you ship LangChain apps to production faster, check out [LangSmith](http ## Quick Install -`pip install langchain-classic` +```bash +pip install langchain-classic +``` ## πŸ€” What is this? -Large language models (LLMs) are emerging as a transformative technology, enabling developers to build applications that they previously could not. However, using these LLMs in isolation is often insufficient for creating a truly powerful app - the real power comes when you can combine them with other sources of computation or knowledge. +Legacy chains, `langchain-community` re-exports, indexing API, deprecated functionality, and more. -This library aims to assist in the development of those types of applications. 
Common examples of these applications include: - -**❓ Question answering with RAG** - -- [Documentation](https://python.langchain.com/docs/tutorials/rag/) -- End-to-end Example: [Chat LangChain](https://chat.langchain.com) and [repo](https://github.com/langchain-ai/chat-langchain) - -**🧱 Extracting structured output** - -- [Documentation](https://python.langchain.com/docs/tutorials/extraction/) -- End-to-end Example: [SQL Llama2 Template](https://github.com/langchain-ai/langchain-extract/) - -**πŸ€– Chatbots** - -- [Documentation](https://python.langchain.com/docs/tutorials/chatbot/) -- End-to-end Example: [Web LangChain (web researcher chatbot)](https://weblangchain.vercel.app) and [repo](https://github.com/langchain-ai/weblangchain) +In most cases, you should be using the main [`langchain`](https://pypi.org/project/langchain/) package. ## πŸ“– Documentation -Please see [our full documentation](https://python.langchain.com) on: +For full documentation, see the [API reference](https://reference.langchain.com/python/langchain_classic). -- Getting started (installation, setting up the environment, simple examples) -- How-To examples (demos, integrations, helper functions) -- Reference (full API docs) -- Resources (high-level explanation of core concepts) +## πŸ“• Releases & Versioning -## πŸš€ What can this help with? - -There are five main areas that LangChain is designed to help with. -These are, in increasing order of complexity: - -**πŸ€– Agents:** - -Agents involve an LLM making decisions about which Actions to take, taking that Action, seeing an Observation, and repeating that until done. LangChain provides a standard interface for agents, a selection of agents to choose from, and examples of end-to-end agents. - -**πŸ“š Retrieval Augmented Generation:** - -Retrieval Augmented Generation involves specific types of chains that first interact with an external data source to fetch data for use in the generation step. Examples include summarization of long pieces of text and question/answering over specific data sources. - -**🧐 Evaluation:** - -Generative models are notoriously hard to evaluate with traditional metrics. One new way of evaluating them is using language models themselves to do the evaluation. LangChain provides some prompts/chains for assisting in this. - -**πŸ“ƒ Models and Prompts:** - -This includes prompt management, prompt optimization, a generic interface for all LLMs, and common utilities for working with chat models and LLMs. - -**πŸ”— Chains:** - -Chains go beyond a single LLM call and involve sequences of calls (whether to an LLM or a different utility). LangChain provides a standard interface for chains, lots of integrations with other tools, and end-to-end chains for common applications. - -For more information on these concepts, please see our [full documentation](https://python.langchain.com). +See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning](https://docs.langchain.com/oss/python/versioning) policies. ## πŸ’ Contributing As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. -For detailed information on how to contribute, see the [Contributing Guide](https://python.langchain.com/docs/contributing/). +For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview). 
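The README rewrite above positions `langchain-classic` as the home for legacy chains and `langchain-community` re-exports while pointing new projects at the main `langchain` package. A minimal sketch of what that split looks like in practice is below; it assumes `langchain-classic` is installed and that the legacy module layout carries over under the `langchain_classic` namespace (the `AgentExecutor` import path is taken from the docstring examples later in this patch).

```python
# Hedged sketch: legacy imports move from the old `langchain` namespace to
# `langchain_classic`; new applications should depend on the main `langchain`
# package instead.
#
#   pip install langchain-classic

# Old (pre-rename) import, shown for comparison:
#   from langchain.agents import AgentExecutor

# New location of the legacy agent runtime (assumed to mirror the old layout):
from langchain_classic.agents import AgentExecutor

print(AgentExecutor.__module__)  # shows where the legacy symbol now lives
```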
diff --git a/libs/langchain/langchain_classic/_api/module_import.py b/libs/langchain/langchain_classic/_api/module_import.py index b19ef5d3590..90520bb1a6c 100644 --- a/libs/langchain/langchain_classic/_api/module_import.py +++ b/libs/langchain/langchain_classic/_api/module_import.py @@ -26,7 +26,7 @@ def create_importer( imports to new imports. The function will raise deprecation warning on loops using - deprecated_lookups or fallback_module. + `deprecated_lookups` or `fallback_module`. Module lookups will import without deprecation warnings (used to speed up imports from large namespaces like llms or chat models). @@ -37,18 +37,20 @@ def create_importer( loss of type information, IDE support for going to definition etc). Args: - package: current package. Use __package__ - module_lookup: maps name of object to the module where it is defined. + package: Current package. Use `__package__` + module_lookup: Maps name of object to the module where it is defined. e.g., + ```json { "MyDocumentLoader": ( "langchain_community.document_loaders.my_document_loader" ) } - deprecated_lookups: same as module look up, but will raise + ``` + deprecated_lookups: Same as module look up, but will raise deprecation warnings. - fallback_module: module to import from if the object is not found in - module_lookup or if module_lookup is not provided. + fallback_module: Module to import from if the object is not found in + `module_lookup` or if `module_lookup` is not provided. Returns: A function that imports objects from the specified modules. @@ -56,7 +58,7 @@ def create_importer( all_module_lookup = {**(deprecated_lookups or {}), **(module_lookup or {})} def import_by_name(name: str) -> Any: - """Import stores from langchain_community.""" + """Import stores from `langchain_community`.""" # If not in interactive env, raise warning. if all_module_lookup and name in all_module_lookup: new_module = all_module_lookup[name] diff --git a/libs/langchain/langchain_classic/agents/agent.py b/libs/langchain/langchain_classic/agents/agent.py index 4984573fa08..6c98fa13809 100644 --- a/libs/langchain/langchain_classic/agents/agent.py +++ b/libs/langchain/langchain_classic/agents/agent.py @@ -105,10 +105,7 @@ class BaseSingleActionAgent(BaseModel): @property @abstractmethod def input_keys(self) -> list[str]: - """Return the input keys. - - :meta private: - """ + """Return the input keys.""" def return_stopped_response( self, @@ -124,7 +121,7 @@ class BaseSingleActionAgent(BaseModel): along with observations. Returns: - AgentFinish: Agent finish object. + Agent finish object. Raises: ValueError: If `early_stopping_method` is not supported. @@ -155,7 +152,7 @@ class BaseSingleActionAgent(BaseModel): kwargs: Additional arguments. Returns: - BaseSingleActionAgent: Agent object. + Agent object. """ raise NotImplementedError @@ -169,7 +166,7 @@ class BaseSingleActionAgent(BaseModel): """Return dictionary representation of agent. Returns: - Dict: Dictionary representation of agent. + Dictionary representation of agent. """ _dict = super().model_dump() try: @@ -233,7 +230,7 @@ class BaseMultiActionAgent(BaseModel): """Get allowed tools. Returns: - list[str] | None: Allowed tools. + Allowed tools. """ return None @@ -278,10 +275,7 @@ class BaseMultiActionAgent(BaseModel): @property @abstractmethod def input_keys(self) -> list[str]: - """Return the input keys. - - :meta private: - """ + """Return the input keys.""" def return_stopped_response( self, @@ -297,7 +291,7 @@ class BaseMultiActionAgent(BaseModel): along with observations. 
Returns: - AgentFinish: Agent finish object. + Agent finish object. Raises: ValueError: If `early_stopping_method` is not supported. @@ -329,7 +323,7 @@ class BaseMultiActionAgent(BaseModel): Raises: NotImplementedError: If agent does not support saving. - ValueError: If file_path is not json or yaml. + ValueError: If `file_path` is not json or yaml. Example: ```python @@ -388,8 +382,7 @@ class MultiActionAgentOutputParser( text: Text to parse. Returns: - Union[List[AgentAction], AgentFinish]: - List of agent actions or agent finish. + List of agent actions or agent finish. """ @@ -404,8 +397,8 @@ class RunnableAgent(BaseSingleActionAgent): """Whether to stream from the runnable or not. If `True` then underlying LLM is invoked in a streaming fashion to make it possible - to get access to the individual LLM tokens when using stream_log with the Agent - Executor. If `False` then LLM is invoked in a non-streaming fashion and + to get access to the individual LLM tokens when using stream_log with the + `AgentExecutor`. If `False` then LLM is invoked in a non-streaming fashion and individual LLM tokens will not be available in stream_log. """ @@ -446,7 +439,7 @@ class RunnableAgent(BaseSingleActionAgent): # Use streaming to make sure that the underlying LLM is invoked in a # streaming # fashion to make it possible to get access to the individual LLM tokens - # when using stream_log with the Agent Executor. + # when using stream_log with the AgentExecutor. # Because the response from the plan is not a generator, we need to # accumulate the output into final output and return that. for chunk in self.runnable.stream(inputs, config={"callbacks": callbacks}): @@ -482,7 +475,7 @@ class RunnableAgent(BaseSingleActionAgent): # Use streaming to make sure that the underlying LLM is invoked in a # streaming # fashion to make it possible to get access to the individual LLM tokens - # when using stream_log with the Agent Executor. + # when using stream_log with the AgentExecutor. # Because the response from the plan is not a generator, we need to # accumulate the output into final output and return that. async for chunk in self.runnable.astream( @@ -512,8 +505,8 @@ class RunnableMultiActionAgent(BaseMultiActionAgent): """Whether to stream from the runnable or not. If `True` then underlying LLM is invoked in a streaming fashion to make it possible - to get access to the individual LLM tokens when using stream_log with the Agent - Executor. If `False` then LLM is invoked in a non-streaming fashion and + to get access to the individual LLM tokens when using stream_log with the + `AgentExecutor`. If `False` then LLM is invoked in a non-streaming fashion and individual LLM tokens will not be available in stream_log. """ @@ -558,7 +551,7 @@ class RunnableMultiActionAgent(BaseMultiActionAgent): # Use streaming to make sure that the underlying LLM is invoked in a # streaming # fashion to make it possible to get access to the individual LLM tokens - # when using stream_log with the Agent Executor. + # when using stream_log with the AgentExecutor. # Because the response from the plan is not a generator, we need to # accumulate the output into final output and return that. 
for chunk in self.runnable.stream(inputs, config={"callbacks": callbacks}): @@ -594,7 +587,7 @@ class RunnableMultiActionAgent(BaseMultiActionAgent): # Use streaming to make sure that the underlying LLM is invoked in a # streaming # fashion to make it possible to get access to the individual LLM tokens - # when using stream_log with the Agent Executor. + # when using stream_log with the AgentExecutor. # Because the response from the plan is not a generator, we need to # accumulate the output into final output and return that. async for chunk in self.runnable.astream( @@ -812,7 +805,7 @@ class Agent(BaseSingleActionAgent): **kwargs: User inputs. Returns: - Dict[str, Any]: Full inputs for the LLMChain. + Full inputs for the LLMChain. """ thoughts = self._construct_scratchpad(intermediate_steps) new_inputs = {"agent_scratchpad": thoughts, "stop": self._stop} @@ -820,10 +813,7 @@ class Agent(BaseSingleActionAgent): @property def input_keys(self) -> list[str]: - """Return the input keys. - - :meta private: - """ + """Return the input keys.""" return list(set(self.llm_chain.input_keys) - {"agent_scratchpad"}) @model_validator(mode="after") @@ -834,11 +824,11 @@ class Agent(BaseSingleActionAgent): values: Values to validate. Returns: - Dict: Validated values. + Validated values. Raises: ValueError: If `agent_scratchpad` is not in prompt.input_variables - and prompt is not a FewShotPromptTemplate or a PromptTemplate. + and prompt is not a FewShotPromptTemplate or a PromptTemplate. """ prompt = self.llm_chain.prompt if "agent_scratchpad" not in prompt.input_variables: @@ -875,7 +865,7 @@ class Agent(BaseSingleActionAgent): tools: Tools to use. Returns: - BasePromptTemplate: Prompt template. + Prompt template. """ @classmethod @@ -910,7 +900,7 @@ class Agent(BaseSingleActionAgent): kwargs: Additional arguments. Returns: - Agent: Agent object. + Agent object. """ cls._validate_tools(tools) llm_chain = LLMChain( @@ -942,7 +932,7 @@ class Agent(BaseSingleActionAgent): **kwargs: User inputs. Returns: - AgentFinish: Agent finish object. + Agent finish object. Raises: ValueError: If `early_stopping_method` is not in ['force', 'generate']. @@ -1054,9 +1044,9 @@ class AgentExecutor(Chain): Defaults to `False`, which raises the error. If `true`, the error will be sent back to the LLM as an observation. If a string, the string itself will be sent to the LLM as an observation. - If a callable function, the function will be called with the exception - as an argument, and the result of that function will be passed to the agent - as an observation. + If a callable function, the function will be called with the exception as an + argument, and the result of that function will be passed to the agent as an + observation. """ trim_intermediate_steps: ( int | Callable[[list[tuple[AgentAction, str]]], list[tuple[AgentAction, str]]] @@ -1082,7 +1072,7 @@ class AgentExecutor(Chain): kwargs: Additional arguments. Returns: - AgentExecutor: Agent executor object. + Agent executor object. """ return cls( agent=agent, @@ -1099,7 +1089,7 @@ class AgentExecutor(Chain): values: Values to validate. Returns: - Dict: Validated values. + Validated values. Raises: ValueError: If allowed tools are different than provided tools. @@ -1126,7 +1116,7 @@ class AgentExecutor(Chain): values: Values to validate. Returns: - Dict: Validated values. + Validated values. """ agent = values.get("agent") if agent and isinstance(agent, Runnable): @@ -1209,7 +1199,7 @@ class AgentExecutor(Chain): async_: Whether to run async. 
(Ignored) Returns: - AgentExecutorIterator: Agent executor iterator object. + Agent executor iterator object. """ return AgentExecutorIterator( self, @@ -1221,18 +1211,12 @@ class AgentExecutor(Chain): @property def input_keys(self) -> list[str]: - """Return the input keys. - - :meta private: - """ + """Return the input keys.""" return self._action_agent.input_keys @property def output_keys(self) -> list[str]: - """Return the singular output key. - - :meta private: - """ + """Return the singular output key.""" if self.return_intermediate_steps: return [*self._action_agent.return_values, "intermediate_steps"] return self._action_agent.return_values @@ -1244,7 +1228,7 @@ class AgentExecutor(Chain): name: Name of tool. Returns: - BaseTool: Tool object. + Tool object. """ return {tool.name: tool for tool in self.tools}[name] @@ -1759,7 +1743,7 @@ class AgentExecutor(Chain): kwargs: Additional arguments. Yields: - AddableDict: Addable dictionary. + Addable dictionary. """ config = ensure_config(config) iterator = AgentExecutorIterator( @@ -1790,7 +1774,7 @@ class AgentExecutor(Chain): kwargs: Additional arguments. Yields: - AddableDict: Addable dictionary. + Addable dictionary. """ config = ensure_config(config) iterator = AgentExecutorIterator( diff --git a/libs/langchain/langchain_classic/agents/agent_iterator.py b/libs/langchain/langchain_classic/agents/agent_iterator.py index f9ed911c91e..138b58f69c2 100644 --- a/libs/langchain/langchain_classic/agents/agent_iterator.py +++ b/libs/langchain/langchain_classic/agents/agent_iterator.py @@ -53,9 +53,9 @@ class AgentExecutorIterator: include_run_info: bool = False, yield_actions: bool = False, ): - """Initialize the AgentExecutorIterator. + """Initialize the `AgentExecutorIterator`. - Initialize the AgentExecutorIterator with the given AgentExecutor, + Initialize the `AgentExecutorIterator` with the given `AgentExecutor`, inputs, and optional callbacks. Args: @@ -91,7 +91,7 @@ class AgentExecutorIterator: @property def inputs(self) -> dict[str, str]: - """The inputs to the AgentExecutor.""" + """The inputs to the `AgentExecutor`.""" return self._inputs @inputs.setter @@ -100,7 +100,7 @@ class AgentExecutorIterator: @property def agent_executor(self) -> AgentExecutor: - """The AgentExecutor to iterate over.""" + """The `AgentExecutor` to iterate over.""" return self._agent_executor @agent_executor.setter @@ -171,7 +171,7 @@ class AgentExecutorIterator: return prepared_outputs def __iter__(self: AgentExecutorIterator) -> Iterator[AddableDict]: - """Create an async iterator for the AgentExecutor.""" + """Create an async iterator for the `AgentExecutor`.""" logger.debug("Initialising AgentExecutorIterator") self.reset() callback_manager = CallbackManager.configure( @@ -235,7 +235,7 @@ class AgentExecutorIterator: yield self._stop(run_manager) async def __aiter__(self) -> AsyncIterator[AddableDict]: - """Create an async iterator for the AgentExecutor. + """Create an async iterator for the `AgentExecutor`. N.B. __aiter__ must be a normal method, so need to initialize async run manager on first __anext__ call where we can await it. 
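The `RunnableAgent`/`RunnableMultiActionAgent` and `AgentExecutorIterator` docstrings updated above all describe the same behaviour: with `stream_runnable=True`, the underlying LLM is invoked in streaming mode and `AgentExecutor.stream` yields `AddableDict` chunks. A minimal sketch of that flow follows; it assumes `langchain-openai` is installed, an `OPENAI_API_KEY` is set, and the model name is illustrative.

```python
# Hedged sketch of the streaming behaviour described in the docstrings above.
from langchain_classic.agents import AgentExecutor, create_tool_calling_agent
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI


@tool
def word_length(word: str) -> int:
    """Return the number of characters in a word."""
    return len(word)


prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant."),
        ("human", "{input}"),
        ("placeholder", "{agent_scratchpad}"),
    ]
)
model = ChatOpenAI(model="gpt-4o-mini")  # illustrative model name
agent = create_tool_calling_agent(model, [word_length], prompt)
executor = AgentExecutor(agent=agent, tools=[word_length])

# Each chunk is an AddableDict carrying actions, steps, or the final output.
for chunk in executor.stream({"input": "How many letters are in 'langchain'?"}):
    print(chunk)
```

Per the docstrings above, constructing the agent with `stream_runnable=False` (on `RunnableAgent`/`RunnableMultiActionAgent`) would instead invoke the LLM in a single call, so individual tokens would not surface through `stream_log`.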
diff --git a/libs/langchain/langchain_classic/agents/agent_toolkits/__init__.py b/libs/langchain/langchain_classic/agents/agent_toolkits/__init__.py index 6275d049256..e09e993933b 100644 --- a/libs/langchain/langchain_classic/agents/agent_toolkits/__init__.py +++ b/libs/langchain/langchain_classic/agents/agent_toolkits/__init__.py @@ -11,7 +11,7 @@ When developing an application, developers should inspect the capabilities and permissions of the tools that underlie the given agent toolkit, and determine whether permissions of the given toolkit are appropriate for the application. -See [Security](https://python.langchain.com/docs/security) for more information. +See https://docs.langchain.com/oss/python/security-policy for more information. """ from pathlib import Path diff --git a/libs/langchain/langchain_classic/agents/agent_toolkits/conversational_retrieval/openai_functions.py b/libs/langchain/langchain_classic/agents/agent_toolkits/conversational_retrieval/openai_functions.py index e95a233ec2a..3c054a3ddb8 100644 --- a/libs/langchain/langchain_classic/agents/agent_toolkits/conversational_retrieval/openai_functions.py +++ b/libs/langchain/langchain_classic/agents/agent_toolkits/conversational_retrieval/openai_functions.py @@ -1,7 +1,6 @@ from typing import Any from langchain_core.language_models import BaseLanguageModel -from langchain_core.memory import BaseMemory from langchain_core.messages import SystemMessage from langchain_core.prompts.chat import MessagesPlaceholder from langchain_core.tools import BaseTool @@ -11,6 +10,7 @@ from langchain_classic.agents.openai_functions_agent.agent_token_buffer_memory i AgentTokenBufferMemory, ) from langchain_classic.agents.openai_functions_agent.base import OpenAIFunctionsAgent +from langchain_classic.base_memory import BaseMemory from langchain_classic.memory.token_buffer import ConversationTokenBufferMemory @@ -37,7 +37,7 @@ def create_conversational_retrieval_agent( """A convenience method for creating a conversational retrieval agent. Args: - llm: The language model to use, should be ChatOpenAI + llm: The language model to use, should be `ChatOpenAI` tools: A list of tools the agent has access to remember_intermediate_steps: Whether the agent should remember intermediate steps or not. Intermediate steps refer to prior action/observation @@ -47,11 +47,9 @@ def create_conversational_retrieval_agent( memory_key: The name of the memory key in the prompt. system_message: The system message to use. By default, a basic one will be used. - verbose: Whether or not the final AgentExecutor should be verbose or not, - defaults to False. + verbose: Whether or not the final AgentExecutor should be verbose or not. max_token_limit: The max number of tokens to keep around in memory. - Defaults to 2000. - **kwargs: Additional keyword arguments to pass to the AgentExecutor. + **kwargs: Additional keyword arguments to pass to the `AgentExecutor`. 
Returns: An agent executor initialized appropriately diff --git a/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/base.py b/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/base.py index 95587613af6..9a97eb32beb 100644 --- a/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/base.py +++ b/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/base.py @@ -58,7 +58,7 @@ def create_vectorstore_agent( from langchain_openai import ChatOpenAI, OpenAIEmbeddings from langgraph.prebuilt import create_react_agent - llm = ChatOpenAI(model="gpt-4o-mini", temperature=0) + model = ChatOpenAI(model="gpt-4o-mini", temperature=0) vector_store = InMemoryVectorStore.from_texts( [ @@ -74,7 +74,7 @@ def create_vectorstore_agent( "Fetches information about pets.", ) - agent = create_react_agent(llm, [tool]) + agent = create_react_agent(model, [tool]) for step in agent.stream( {"messages": [("human", "What are dogs known for?")]}, @@ -86,13 +86,12 @@ def create_vectorstore_agent( Args: llm: LLM that will be used by the agent toolkit: Set of tools for the agent - callback_manager: Object to handle the callback [ Defaults to `None`. ] - prefix: The prefix prompt for the agent. If not provided uses default PREFIX. + callback_manager: Object to handle the callback + prefix: The prefix prompt for the agent. verbose: If you want to see the content of the scratchpad. - [ Defaults to `False` ] agent_executor_kwargs: If there is any other parameter you want to send to the - agent. [ Defaults to `None` ] - kwargs: Additional named parameters to pass to the ZeroShotAgent. + agent. + kwargs: Additional named parameters to pass to the `ZeroShotAgent`. Returns: Returns a callable AgentExecutor object. @@ -156,7 +155,7 @@ def create_vectorstore_router_agent( from langchain_openai import ChatOpenAI, OpenAIEmbeddings from langgraph.prebuilt import create_react_agent - llm = ChatOpenAI(model="gpt-4o-mini", temperature=0) + model = ChatOpenAI(model="gpt-4o-mini", temperature=0) pet_vector_store = InMemoryVectorStore.from_texts( [ @@ -187,7 +186,7 @@ def create_vectorstore_router_agent( ), ] - agent = create_react_agent(llm, tools) + agent = create_react_agent(model, tools) for step in agent.stream( {"messages": [("human", "Tell me about carrots.")]}, @@ -200,17 +199,16 @@ def create_vectorstore_router_agent( llm: LLM that will be used by the agent toolkit: Set of tools for the agent which have routing capability with multiple vector stores - callback_manager: Object to handle the callback [ Defaults to `None`. ] + callback_manager: Object to handle the callback prefix: The prefix prompt for the router agent. - If not provided uses default ROUTER_PREFIX. + If not provided uses default `ROUTER_PREFIX`. verbose: If you want to see the content of the scratchpad. - [ Defaults to `False` ] agent_executor_kwargs: If there is any other parameter you want to send to the - agent. [ Defaults to `None` ] - kwargs: Additional named parameters to pass to the ZeroShotAgent. + agent. + kwargs: Additional named parameters to pass to the `ZeroShotAgent`. Returns: - Returns a callable AgentExecutor object. + Returns a callable `AgentExecutor` object. Either you can call it or use run method with the query to get the response. 
""" diff --git a/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/toolkit.py b/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/toolkit.py index 3d2df64ffcc..56940d8564e 100644 --- a/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/toolkit.py +++ b/libs/langchain/langchain_classic/agents/agent_toolkits/vectorstore/toolkit.py @@ -8,7 +8,7 @@ from pydantic import BaseModel, ConfigDict, Field class VectorStoreInfo(BaseModel): - """Information about a VectorStore.""" + """Information about a `VectorStore`.""" vectorstore: VectorStore = Field(exclude=True) name: str @@ -20,7 +20,7 @@ class VectorStoreInfo(BaseModel): class VectorStoreToolkit(BaseToolkit): - """Toolkit for interacting with a Vector Store.""" + """Toolkit for interacting with a `VectorStore`.""" vectorstore_info: VectorStoreInfo = Field(exclude=True) llm: BaseLanguageModel diff --git a/libs/langchain/langchain_classic/agents/agent_types.py b/libs/langchain/langchain_classic/agents/agent_types.py index 073cb62cd3d..5bff78f3054 100644 --- a/libs/langchain/langchain_classic/agents/agent_types.py +++ b/libs/langchain/langchain_classic/agents/agent_types.py @@ -13,10 +13,7 @@ from langchain_classic._api.deprecation import AGENT_DEPRECATION_WARNING removal="1.0", ) class AgentType(str, Enum): - """An enum for agent types. - - See documentation: https://python.langchain.com/api_reference/langchain/agents/langchain.agents.agent_types.AgentType.html - """ + """An enum for agent types.""" ZERO_SHOT_REACT_DESCRIPTION = "zero-shot-react-description" """A zero shot agent that does a reasoning step before acting.""" diff --git a/libs/langchain/langchain_classic/agents/chat/base.py b/libs/langchain/langchain_classic/agents/chat/base.py index 136d4f1f4f0..c2f5f9576fa 100644 --- a/libs/langchain/langchain_classic/agents/chat/base.py +++ b/libs/langchain/langchain_classic/agents/chat/base.py @@ -94,13 +94,10 @@ class ChatAgent(Agent): Args: tools: A list of tools. system_message_prefix: The system message prefix. - Default is SYSTEM_MESSAGE_PREFIX. system_message_suffix: The system message suffix. - Default is SYSTEM_MESSAGE_SUFFIX. - human_message: The human message. Default is HUMAN_MESSAGE. + human_message: The `HumanMessage`. format_instructions: The format instructions. - Default is FORMAT_INSTRUCTIONS. - input_variables: The input variables. Default is None. + input_variables: The input variables. Returns: A prompt template. @@ -141,16 +138,13 @@ class ChatAgent(Agent): Args: llm: The language model. tools: A list of tools. - callback_manager: The callback manager. Default is None. - output_parser: The output parser. Default is None. + callback_manager: The callback manager. + output_parser: The output parser. system_message_prefix: The system message prefix. - Default is SYSTEM_MESSAGE_PREFIX. system_message_suffix: The system message suffix. - Default is SYSTEM_MESSAGE_SUFFIX. - human_message: The human message. Default is HUMAN_MESSAGE. + human_message: The `HumanMessage`. format_instructions: The format instructions. - Default is FORMAT_INSTRUCTIONS. - input_variables: The input variables. Default is None. + input_variables: The input variables. kwargs: Additional keyword arguments. 
Returns: diff --git a/libs/langchain/langchain_classic/agents/conversational/base.py b/libs/langchain/langchain_classic/agents/conversational/base.py index f198bb733df..84afe3313d6 100644 --- a/libs/langchain/langchain_classic/agents/conversational/base.py +++ b/libs/langchain/langchain_classic/agents/conversational/base.py @@ -87,15 +87,13 @@ class ConversationalAgent(Agent): Args: tools: List of tools the agent will have access to, used to format the prompt. - prefix: String to put before the list of tools. Defaults to PREFIX. - suffix: String to put after the list of tools. Defaults to SUFFIX. - format_instructions: Instructions on how to use the tools. Defaults to - FORMAT_INSTRUCTIONS - ai_prefix: String to use before AI output. Defaults to "AI". + prefix: String to put before the list of tools. + suffix: String to put after the list of tools. + format_instructions: Instructions on how to use the tools. + ai_prefix: String to use before AI output. human_prefix: String to use before human output. - Defaults to "Human". input_variables: List of input variables the final prompt will expect. - Defaults to ["input", "chat_history", "agent_scratchpad"]. + Defaults to `["input", "chat_history", "agent_scratchpad"]`. Returns: A PromptTemplate with the template assembled from the pieces here. @@ -139,16 +137,14 @@ class ConversationalAgent(Agent): Args: llm: The language model to use. tools: A list of tools to use. - callback_manager: The callback manager to use. Default is None. - output_parser: The output parser to use. Default is None. - prefix: The prefix to use in the prompt. Default is PREFIX. - suffix: The suffix to use in the prompt. Default is SUFFIX. + callback_manager: The callback manager to use. + output_parser: The output parser to use. + prefix: The prefix to use in the prompt. + suffix: The suffix to use in the prompt. format_instructions: The format instructions to use. - Default is FORMAT_INSTRUCTIONS. - ai_prefix: The prefix to use before AI output. Default is "AI". + ai_prefix: The prefix to use before AI output. human_prefix: The prefix to use before human output. - Default is "Human". - input_variables: The input variables to use. Default is None. + input_variables: The input variables to use. **kwargs: Any additional keyword arguments to pass to the agent. Returns: diff --git a/libs/langchain/langchain_classic/agents/conversational_chat/base.py b/libs/langchain/langchain_classic/agents/conversational_chat/base.py index b8943bdb356..d0f17707018 100644 --- a/libs/langchain/langchain_classic/agents/conversational_chat/base.py +++ b/libs/langchain/langchain_classic/agents/conversational_chat/base.py @@ -87,15 +87,13 @@ class ConversationalChatAgent(Agent): Args: tools: The tools to use. - system_message: The system message to use. - Defaults to the PREFIX. - human_message: The human message to use. - Defaults to the SUFFIX. - input_variables: The input variables to use. Defaults to `None`. - output_parser: The output parser to use. Defaults to `None`. + system_message: The `SystemMessage` to use. + human_message: The `HumanMessage` to use. + input_variables: The input variables to use. + output_parser: The output parser to use. Returns: - A PromptTemplate. + A `PromptTemplate`. """ tool_strings = "\n".join( [f"> {tool.name}: {tool.description}" for tool in tools], @@ -150,11 +148,11 @@ class ConversationalChatAgent(Agent): Args: llm: The language model to use. tools: A list of tools to use. - callback_manager: The callback manager to use. Default is None. 
- output_parser: The output parser to use. Default is None. - system_message: The system message to use. Default is PREFIX. - human_message: The human message to use. Default is SUFFIX. - input_variables: The input variables to use. Default is None. + callback_manager: The callback manager to use. + output_parser: The output parser to use. + system_message: The `SystemMessage` to use. + human_message: The `HumanMessage` to use. + input_variables: The input variables to use. **kwargs: Any additional arguments. Returns: diff --git a/libs/langchain/langchain_classic/agents/format_scratchpad/log.py b/libs/langchain/langchain_classic/agents/format_scratchpad/log.py index bf24a96a67a..5bef3f0cec2 100644 --- a/libs/langchain/langchain_classic/agents/format_scratchpad/log.py +++ b/libs/langchain/langchain_classic/agents/format_scratchpad/log.py @@ -11,12 +11,10 @@ def format_log_to_str( Args: intermediate_steps: List of tuples of AgentAction and observation strings. observation_prefix: Prefix to append the observation with. - Defaults to "Observation: ". llm_prefix: Prefix to append the llm call with. - Defaults to "Thought: ". Returns: - str: The scratchpad. + The scratchpad. """ thoughts = "" for action, observation in intermediate_steps: diff --git a/libs/langchain/langchain_classic/agents/format_scratchpad/log_to_messages.py b/libs/langchain/langchain_classic/agents/format_scratchpad/log_to_messages.py index a193e37ae38..5bc338831e9 100644 --- a/libs/langchain/langchain_classic/agents/format_scratchpad/log_to_messages.py +++ b/libs/langchain/langchain_classic/agents/format_scratchpad/log_to_messages.py @@ -11,10 +11,10 @@ def format_log_to_messages( Args: intermediate_steps: List of tuples of AgentAction and observation strings. template_tool_response: Template to format the observation with. - Defaults to "{observation}". + Defaults to `"{observation}"`. Returns: - List[BaseMessage]: The scratchpad. + The scratchpad. """ thoughts: list[BaseMessage] = [] for action, observation in intermediate_steps: diff --git a/libs/langchain/langchain_classic/agents/initialize.py b/libs/langchain/langchain_classic/agents/initialize.py index c1df318ab60..c01e0e441ea 100644 --- a/libs/langchain/langchain_classic/agents/initialize.py +++ b/libs/langchain/langchain_classic/agents/initialize.py @@ -38,14 +38,14 @@ def initialize_agent( tools: List of tools this agent has access to. llm: Language model to use as the agent. agent: Agent type to use. If `None` and agent_path is also None, will default - to AgentType.ZERO_SHOT_REACT_DESCRIPTION. Defaults to `None`. + to AgentType.ZERO_SHOT_REACT_DESCRIPTION. callback_manager: CallbackManager to use. Global callback manager is used if - not provided. Defaults to `None`. + not provided. agent_path: Path to serialized agent to use. If `None` and agent is also None, - will default to AgentType.ZERO_SHOT_REACT_DESCRIPTION. Defaults to `None`. + will default to AgentType.ZERO_SHOT_REACT_DESCRIPTION. agent_kwargs: Additional keyword arguments to pass to the underlying agent. - Defaults to `None`. - tags: Tags to apply to the traced runs. Defaults to `None`. + + tags: Tags to apply to the traced runs. kwargs: Additional keyword arguments passed to the agent executor. 
Returns: diff --git a/libs/langchain/langchain_classic/agents/json_chat/base.py b/libs/langchain/langchain_classic/agents/json_chat/base.py index 2c8091601e8..90dd72ad753 100644 --- a/libs/langchain/langchain_classic/agents/json_chat/base.py +++ b/libs/langchain/langchain_classic/agents/json_chat/base.py @@ -30,13 +30,12 @@ def create_json_chat_agent( If `False`, does not add a stop token. If a list of str, uses the provided list as the stop tokens. - Default is True. You may to set this to False if the LLM you are using - does not support stop sequences. + You may to set this to False if the LLM you are using does not support stop + sequences. tools_renderer: This controls how the tools are converted into a string and - then passed into the LLM. Default is `render_text_description`. + then passed into the LLM. template_tool_response: Template prompt that uses the tool response (observation) to make the LLM generate the next action to take. - Default is TEMPLATE_TOOL_RESPONSE. Returns: A Runnable sequence representing an agent. It takes as input all the same input @@ -51,7 +50,7 @@ def create_json_chat_agent( Example: ```python from langchain_classic import hub - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_classic.agents import AgentExecutor, create_json_chat_agent prompt = hub.pull("hwchase17/react-chat-json") diff --git a/libs/langchain/langchain_classic/agents/mrkl/base.py b/libs/langchain/langchain_classic/agents/mrkl/base.py index 799b7337022..6ed82f50e72 100644 --- a/libs/langchain/langchain_classic/agents/mrkl/base.py +++ b/libs/langchain/langchain_classic/agents/mrkl/base.py @@ -93,12 +93,11 @@ class ZeroShotAgent(Agent): Args: tools: List of tools the agent will have access to, used to format the prompt. - prefix: String to put before the list of tools. Defaults to PREFIX. - suffix: String to put after the list of tools. Defaults to SUFFIX. + prefix: String to put before the list of tools. + suffix: String to put after the list of tools. format_instructions: Instructions on how to use the tools. - Defaults to FORMAT_INSTRUCTIONS input_variables: List of input variables the final prompt will expect. - Defaults to `None`. + Returns: A PromptTemplate with the template assembled from the pieces here. @@ -129,13 +128,12 @@ class ZeroShotAgent(Agent): Args: llm: The LLM to use as the agent LLM. tools: The tools to use. - callback_manager: The callback manager to use. Defaults to `None`. - output_parser: The output parser to use. Defaults to `None`. - prefix: The prefix to use. Defaults to PREFIX. - suffix: The suffix to use. Defaults to SUFFIX. + callback_manager: The callback manager to use. + output_parser: The output parser to use. + prefix: The prefix to use. + suffix: The suffix to use. format_instructions: The format instructions to use. - Defaults to FORMAT_INSTRUCTIONS. - input_variables: The input variables to use. Defaults to `None`. + input_variables: The input variables to use. kwargs: Additional parameters to pass to the agent. 
""" cls._validate_tools(tools) diff --git a/libs/langchain/langchain_classic/agents/openai_assistant/base.py b/libs/langchain/langchain_classic/agents/openai_assistant/base.py index ed428e4ed16..472c3ad27a1 100644 --- a/libs/langchain/langchain_classic/agents/openai_assistant/base.py +++ b/libs/langchain/langchain_classic/agents/openai_assistant/base.py @@ -231,15 +231,15 @@ class OpenAIAssistantRunnable(RunnableSerializable[dict, OutputType]): """ client: Any = Field(default_factory=_get_openai_client) - """OpenAI or AzureOpenAI client.""" + """`OpenAI` or `AzureOpenAI` client.""" async_client: Any = None - """OpenAI or AzureOpenAI async client.""" + """`OpenAI` or `AzureOpenAI` async client.""" assistant_id: str """OpenAI assistant id.""" check_every_ms: float = 1_000.0 """Frequency with which to check run progress in ms.""" as_agent: bool = False - """Use as a LangChain agent, compatible with the AgentExecutor.""" + """Use as a LangChain agent, compatible with the `AgentExecutor`.""" @model_validator(mode="after") def _validate_async_client(self) -> Self: @@ -314,7 +314,7 @@ class OpenAIAssistantRunnable(RunnableSerializable[dict, OutputType]): run_metadata: Metadata to associate with new run. attachments: A list of files attached to the message, and the tools they should be added to. - config: Runnable config. Defaults to `None`. + config: Runnable config. **kwargs: Additional arguments. Returns: @@ -446,7 +446,7 @@ class OpenAIAssistantRunnable(RunnableSerializable[dict, OutputType]): max_completion_tokens: Allow setting max_completion_tokens for this run. max_prompt_tokens: Allow setting max_prompt_tokens for this run. run_metadata: Metadata to associate with new run. - config: Runnable config. Defaults to `None`. + config: Runnable config. kwargs: Additional arguments. Returns: diff --git a/libs/langchain/langchain_classic/agents/openai_functions_agent/agent_token_buffer_memory.py b/libs/langchain/langchain_classic/agents/openai_functions_agent/agent_token_buffer_memory.py index 4f8e49f0deb..f6f1307cfc9 100644 --- a/libs/langchain/langchain_classic/agents/openai_functions_agent/agent_token_buffer_memory.py +++ b/libs/langchain/langchain_classic/agents/openai_functions_agent/agent_token_buffer_memory.py @@ -17,18 +17,17 @@ class AgentTokenBufferMemory(BaseChatMemory): """Memory used to save agent output AND intermediate steps. Args: - human_prefix: Prefix for human messages. Default is "Human". - ai_prefix: Prefix for AI messages. Default is "AI". + human_prefix: Prefix for human messages. + ai_prefix: Prefix for AI messages. llm: Language model. - memory_key: Key to save memory under. Default is "history". + memory_key: Key to save memory under. max_token_limit: Maximum number of tokens to keep in the buffer. Once the buffer exceeds this many tokens, the oldest - messages will be pruned. Default is 12000. - return_messages: Whether to return messages. Default is True. - output_key: Key to save output under. Default is "output". + messages will be pruned. + return_messages: Whether to return messages. + output_key: Key to save output under. intermediate_steps_key: Key to save intermediate steps under. - Default is "intermediate_steps". - format_as_tools: Whether to format as tools. Default is False. + format_as_tools: Whether to format as tools. """ human_prefix: str = "Human" @@ -50,10 +49,7 @@ class AgentTokenBufferMemory(BaseChatMemory): @property def memory_variables(self) -> list[str]: - """Always return list of memory variables. 
- - :meta private: - """ + """Always return list of memory variables.""" return [self.memory_key] @override diff --git a/libs/langchain/langchain_classic/agents/openai_functions_agent/base.py b/libs/langchain/langchain_classic/agents/openai_functions_agent/base.py index c6907b7eeba..6a4cb091df8 100644 --- a/libs/langchain/langchain_classic/agents/openai_functions_agent/base.py +++ b/libs/langchain/langchain_classic/agents/openai_functions_agent/base.py @@ -40,15 +40,14 @@ class OpenAIFunctionsAgent(BaseSingleActionAgent): """An Agent driven by OpenAIs function powered API. Args: - llm: This should be an instance of ChatOpenAI, specifically a model + llm: This should be an instance of `ChatOpenAI`, specifically a model that supports using `functions`. tools: The tools this agent has access to. prompt: The prompt for this agent, should support agent_scratchpad as one of the variables. For an easy way to construct this prompt, use `OpenAIFunctionsAgent.create_prompt(...)` output_parser: The output parser for this agent. Should be an instance of - OpenAIFunctionsAgentOutputParser. - Defaults to OpenAIFunctionsAgentOutputParser. + `OpenAIFunctionsAgentOutputParser`. """ llm: BaseLanguageModel @@ -106,14 +105,14 @@ class OpenAIFunctionsAgent(BaseSingleActionAgent): Args: intermediate_steps: Steps the LLM has taken to date, along with observations. - callbacks: Callbacks to use. Defaults to `None`. - with_functions: Whether to use functions. Defaults to `True`. + callbacks: Callbacks to use. + with_functions: Whether to use functions. **kwargs: User inputs. Returns: Action specifying what tool to use. - If the agent is finished, returns an AgentFinish. - If the agent is not finished, returns an AgentAction. + If the agent is finished, returns an `AgentFinish`. + If the agent is not finished, returns an `AgentAction`. """ agent_scratchpad = format_to_openai_function_messages(intermediate_steps) selected_inputs = { @@ -146,7 +145,7 @@ class OpenAIFunctionsAgent(BaseSingleActionAgent): Args: intermediate_steps: Steps the LLM has taken to date, along with observations. - callbacks: Callbacks to use. Defaults to `None`. + callbacks: Callbacks to use. **kwargs: User inputs. Returns: @@ -261,8 +260,8 @@ class OpenAIFunctionsAgent(BaseSingleActionAgent): Args: llm: The LLM to use as the agent. tools: The tools to use. - callback_manager: The callback manager to use. Defaults to `None`. - extra_prompt_messages: Extra prompt messages to use. Defaults to `None`. + callback_manager: The callback manager to use. + extra_prompt_messages: Extra prompt messages to use. system_message: The system message to use. Defaults to a default system message. kwargs: Additional parameters to pass to the agent. @@ -311,7 +310,7 @@ def create_openai_functions_agent( Creating an agent with no memory ```python - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_classic.agents import ( AgentExecutor, create_openai_functions_agent, diff --git a/libs/langchain/langchain_classic/agents/openai_functions_multi_agent/base.py b/libs/langchain/langchain_classic/agents/openai_functions_multi_agent/base.py index 3c3383dd451..340a71c21cb 100644 --- a/libs/langchain/langchain_classic/agents/openai_functions_multi_agent/base.py +++ b/libs/langchain/langchain_classic/agents/openai_functions_multi_agent/base.py @@ -212,7 +212,7 @@ class OpenAIMultiFunctionsAgent(BaseMultiActionAgent): Args: intermediate_steps: Steps the LLM has taken to date, along with observations. 
- callbacks: Callbacks to use. Default is None. + callbacks: Callbacks to use. **kwargs: User inputs. Returns: @@ -243,7 +243,7 @@ class OpenAIMultiFunctionsAgent(BaseMultiActionAgent): Args: intermediate_steps: Steps the LLM has taken to date, along with observations. - callbacks: Callbacks to use. Default is None. + callbacks: Callbacks to use. **kwargs: User inputs. Returns: @@ -275,7 +275,7 @@ class OpenAIMultiFunctionsAgent(BaseMultiActionAgent): system_message: Message to use as the system message that will be the first in the prompt. extra_prompt_messages: Prompt messages that will be placed between the - system message and the new human input. Default is None. + system message and the new human input. Returns: A prompt template to pass into this agent. @@ -313,10 +313,10 @@ class OpenAIMultiFunctionsAgent(BaseMultiActionAgent): Args: llm: The language model to use. tools: A list of tools to use. - callback_manager: The callback manager to use. Default is None. - extra_prompt_messages: Extra prompt messages to use. Default is None. - system_message: The system message to use. - Default is a default system message. + callback_manager: The callback manager to use. + extra_prompt_messages: Extra prompt messages to use. + system_message: The system message to use. Default is a default system + message. kwargs: Additional arguments. """ system_message_ = ( diff --git a/libs/langchain/langchain_classic/agents/openai_tools/base.py b/libs/langchain/langchain_classic/agents/openai_tools/base.py index 891dc97d647..5fa0cf58575 100644 --- a/libs/langchain/langchain_classic/agents/openai_tools/base.py +++ b/libs/langchain/langchain_classic/agents/openai_tools/base.py @@ -40,7 +40,7 @@ def create_openai_tools_agent( Example: ```python from langchain_classic import hub - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_classic.agents import ( AgentExecutor, create_openai_tools_agent, diff --git a/libs/langchain/langchain_classic/agents/react/agent.py b/libs/langchain/langchain_classic/agents/react/agent.py index ce56d6ed667..9dce04145c5 100644 --- a/libs/langchain/langchain_classic/agents/react/agent.py +++ b/libs/langchain/langchain_classic/agents/react/agent.py @@ -42,13 +42,13 @@ def create_react_agent( prompt: The prompt to use. See Prompt section below for more. output_parser: AgentOutputParser for parse the LLM output. tools_renderer: This controls how the tools are converted into a string and - then passed into the LLM. Default is `render_text_description`. + then passed into the LLM. stop_sequence: bool or list of str. If `True`, adds a stop token of "Observation:" to avoid hallucinates. If `False`, does not add a stop token. If a list of str, uses the provided list as the stop tokens. - Default is True. You may to set this to False if the LLM you are using + You may to set this to False if the LLM you are using does not support stop sequences. 
Returns: @@ -59,7 +59,7 @@ def create_react_agent( Examples: ```python from langchain_classic import hub - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from langchain_classic.agents import AgentExecutor, create_react_agent prompt = hub.pull("hwchase17/react") diff --git a/libs/langchain/langchain_classic/agents/self_ask_with_search/base.py b/libs/langchain/langchain_classic/agents/self_ask_with_search/base.py index caf513a40ba..cdbff7d590e 100644 --- a/libs/langchain/langchain_classic/agents/self_ask_with_search/base.py +++ b/libs/langchain/langchain_classic/agents/self_ask_with_search/base.py @@ -116,7 +116,7 @@ def create_self_ask_with_search_agent( Examples: ```python from langchain_classic import hub - from langchain_community.chat_models import ChatAnthropic + from langchain_anthropic import ChatAnthropic from langchain_classic.agents import ( AgentExecutor, create_self_ask_with_search_agent, diff --git a/libs/langchain/langchain_classic/agents/structured_chat/base.py b/libs/langchain/langchain_classic/agents/structured_chat/base.py index 2a2d9f25aeb..d76de1bc7d9 100644 --- a/libs/langchain/langchain_classic/agents/structured_chat/base.py +++ b/libs/langchain/langchain_classic/agents/structured_chat/base.py @@ -182,10 +182,10 @@ def create_structured_chat_agent( If `False`, does not add a stop token. If a list of str, uses the provided list as the stop tokens. - Default is True. You may to set this to False if the LLM you are using + You may to set this to False if the LLM you are using does not support stop sequences. tools_renderer: This controls how the tools are converted into a string and - then passed into the LLM. Default is `render_text_description`. + then passed into the LLM. Returns: A Runnable sequence representing an agent. It takes as input all the same input @@ -195,7 +195,7 @@ def create_structured_chat_agent( Examples: ```python from langchain_classic import hub - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_classic.agents import ( AgentExecutor, create_structured_chat_agent, diff --git a/libs/langchain/langchain_classic/agents/tool_calling_agent/base.py b/libs/langchain/langchain_classic/agents/tool_calling_agent/base.py index 1c8b23ae669..ad7ec028e4e 100644 --- a/libs/langchain/langchain_classic/agents/tool_calling_agent/base.py +++ b/libs/langchain/langchain_classic/agents/tool_calling_agent/base.py @@ -55,7 +55,7 @@ def create_tool_calling_agent( ("placeholder", "{agent_scratchpad}"), ] ) - model = ChatAnthropic(model="claude-3-opus-20240229") + model = ChatAnthropic(model="claude-opus-4-1-20250805") @tool def magic_function(input: int) -> int: @@ -83,11 +83,15 @@ def create_tool_calling_agent( ``` Prompt: - The agent prompt must have an `agent_scratchpad` key that is a `MessagesPlaceholder`. Intermediate agent actions and tool output messages will be passed in here. + Troubleshooting: + - If you encounter `invalid_tool_calls` errors, ensure that your tool + functions return properly formatted responses. Tool outputs should be + serializable to JSON. For custom objects, implement proper __str__ or + to_dict methods. 
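The new troubleshooting note for `create_tool_calling_agent` above recommends returning JSON-serializable tool outputs. A hedged sketch of that advice; the `WeatherReport` dataclass and `get_weather` tool are hypothetical examples, not part of this patch:

```python
# Hypothetical example of the troubleshooting advice above: serialize tool
# output explicitly instead of returning a raw custom object.
import json
from dataclasses import asdict, dataclass

from langchain_core.tools import tool


@dataclass
class WeatherReport:
    city: str
    temperature_c: float

    def to_dict(self) -> dict:
        return asdict(self)


@tool
def get_weather(city: str) -> str:
    """Look up the weather for a city."""
    report = WeatherReport(city=city, temperature_c=21.5)
    # Returning JSON text keeps the tool message well-formed downstream.
    return json.dumps(report.to_dict())


print(get_weather.invoke({"city": "Paris"}))
```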
""" missing_vars = {"agent_scratchpad"}.difference( prompt.input_variables + list(prompt.partial_variables), diff --git a/libs/langchain/langchain_classic/agents/xml/base.py b/libs/langchain/langchain_classic/agents/xml/base.py index d06ff88c1ed..5cd81ea4015 100644 --- a/libs/langchain/langchain_classic/agents/xml/base.py +++ b/libs/langchain/langchain_classic/agents/xml/base.py @@ -129,13 +129,13 @@ def create_xml_agent( `tools`: contains descriptions for each tool. `agent_scratchpad`: contains previous agent actions and tool outputs. tools_renderer: This controls how the tools are converted into a string and - then passed into the LLM. Default is `render_text_description`. + then passed into the LLM. stop_sequence: bool or list of str. If `True`, adds a stop token of "" to avoid hallucinates. If `False`, does not add a stop token. If a list of str, uses the provided list as the stop tokens. - Default is True. You may to set this to False if the LLM you are using + You may to set this to False if the LLM you are using does not support stop sequences. Returns: @@ -146,7 +146,7 @@ def create_xml_agent( Example: ```python from langchain_classic import hub - from langchain_community.chat_models import ChatAnthropic + from langchain_anthropic import ChatAnthropic from langchain_classic.agents import AgentExecutor, create_xml_agent prompt = hub.pull("hwchase17/xml-agent-convo") diff --git a/libs/core/langchain_core/memory.py b/libs/langchain/langchain_classic/base_memory.py similarity index 99% rename from libs/core/langchain_core/memory.py rename to libs/langchain/langchain_classic/base_memory.py index ec50cc01e07..eb178bb888c 100644 --- a/libs/core/langchain_core/memory.py +++ b/libs/langchain/langchain_classic/base_memory.py @@ -10,11 +10,10 @@ from __future__ import annotations from abc import ABC, abstractmethod from typing import Any -from pydantic import ConfigDict - from langchain_core._api import deprecated from langchain_core.load.serializable import Serializable from langchain_core.runnables import run_in_executor +from pydantic import ConfigDict @deprecated( diff --git a/libs/langchain/langchain_classic/callbacks/streamlit/__init__.py b/libs/langchain/langchain_classic/callbacks/streamlit/__init__.py index a17acc3c319..a3b68eb384f 100644 --- a/libs/langchain/langchain_classic/callbacks/streamlit/__init__.py +++ b/libs/langchain/langchain_classic/callbacks/streamlit/__init__.py @@ -31,16 +31,15 @@ def StreamlitCallbackHandler( # noqa: N802 max_thought_containers The max number of completed LLM thought containers to show at once. When this threshold is reached, a new thought will cause the oldest thoughts to be - collapsed into a "History" expander. Defaults to 4. + collapsed into a "History" expander. expand_new_thoughts Each LLM "thought" gets its own `st.expander`. This param controls whether that - expander is expanded by default. Defaults to `True`. + expander is expanded by default. collapse_completed_thoughts If `True`, LLM thought expanders will be collapsed when completed. - Defaults to `True`. thought_labeler An optional custom LLMThoughtLabeler instance. If unspecified, the handler - will use the default thought labeling logic. Defaults to `None`. + will use the default thought labeling logic. 
Returns: ------- diff --git a/libs/langchain/langchain_classic/callbacks/tracers/schemas.py b/libs/langchain/langchain_classic/callbacks/tracers/schemas.py index e8f34027d34..32e6b2e4f13 100644 --- a/libs/langchain/langchain_classic/callbacks/tracers/schemas.py +++ b/libs/langchain/langchain_classic/callbacks/tracers/schemas.py @@ -1,27 +1,5 @@ -from langchain_core.tracers.schemas import ( - BaseRun, - ChainRun, - LLMRun, - Run, - RunTypeEnum, - ToolRun, - TracerSession, - TracerSessionBase, - TracerSessionV1, - TracerSessionV1Base, - TracerSessionV1Create, -) +from langchain_core.tracers.schemas import Run __all__ = [ - "BaseRun", - "ChainRun", - "LLMRun", "Run", - "RunTypeEnum", - "ToolRun", - "TracerSession", - "TracerSessionBase", - "TracerSessionV1", - "TracerSessionV1Base", - "TracerSessionV1Create", ] diff --git a/libs/langchain/langchain_classic/chains/api/base.py b/libs/langchain/langchain_classic/chains/api/base.py index 955e52fd5ee..d0009dca6dd 100644 --- a/libs/langchain/langchain_classic/chains/api/base.py +++ b/libs/langchain/langchain_classic/chains/api/base.py @@ -42,7 +42,7 @@ def _check_in_allowed_domain(url: str, limit_to_domains: Sequence[str]) -> bool: limit_to_domains: The allowed domains. Returns: - True if the URL is in the allowed domains, False otherwise. + `True` if the URL is in the allowed domains, `False` otherwise. """ scheme, domain = _extract_scheme_and_domain(url) @@ -68,8 +68,8 @@ try: class APIChain(Chain): """Chain that makes API calls and summarizes the responses to answer a question. - *Security Note*: This API chain uses the requests toolkit - to make GET, POST, PATCH, PUT, and DELETE requests to an API. + **Security Note**: This API chain uses the requests toolkit + to make `GET`, `POST`, `PATCH`, `PUT`, and `DELETE` requests to an API. Exercise care in who is allowed to use this chain. If exposing to end users, consider that users will be able to make arbitrary @@ -80,7 +80,8 @@ try: Control access to who can submit issue requests using this toolkit and what network access it has. - See https://python.langchain.com/docs/security for more information. + See https://docs.langchain.com/oss/python/security-policy for more + information. !!! note This class is deprecated. See below for a replacement implementation using @@ -90,7 +91,7 @@ try: - Support for both token-by-token and step-by-step streaming; - Support for checkpointing and memory of chat history; - Easier to modify or extend - (e.g., with additional tools, structured responses, etc.) + (e.g., with additional tools, structured responses, etc.) 
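Given the security note above, the `limit_to_domains` allow-list is the main guard rail when `APIChain` is still used directly. A minimal sketch, not part of this patch, assuming `langchain-community` (for the requests wrapper) and `langchain_openai` are installed; the API docs string is a placeholder:

```python
# Sketch only; the docs string and model choice are placeholders.
from langchain_classic.chains.api.base import APIChain
from langchain_openai import ChatOpenAI

api_docs = """BASE URL: https://api.open-meteo.com/
The /v1/forecast endpoint accepts latitude, longitude and hourly parameters."""

model = ChatOpenAI(model="gpt-4o-mini", temperature=0)
chain = APIChain.from_llm_and_api_docs(
    model,
    api_docs,
    # Requests outside this domain are rejected; an empty allow-list
    # (the default) raises on instantiation by design.
    limit_to_domains=["https://api.open-meteo.com/"],
)
```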
Install LangGraph with: @@ -143,7 +144,7 @@ try: description: Limit the number of results \"\"\" - llm = ChatOpenAI(model="gpt-4o-mini", temperature=0) + model = ChatOpenAI(model="gpt-4o-mini", temperature=0) toolkit = RequestsToolkit( requests_wrapper=TextRequestsWrapper(headers={}), # no auth required allow_dangerous_requests=ALLOW_DANGEROUS_REQUESTS, @@ -152,7 +153,7 @@ try: api_request_chain = ( API_URL_PROMPT.partial(api_docs=api_spec) - | llm.bind_tools(tools, tool_choice="any") + | model.bind_tools(tools, tool_choice="any") ) class ChainState(TypedDict): @@ -169,7 +170,7 @@ try: return {"messages": [response]} async def acall_model(state: ChainState, config: RunnableConfig): - response = await llm.ainvoke(state["messages"], config) + response = await model.ainvoke(state["messages"], config) return {"messages": [response]} graph_builder = StateGraph(ChainState) @@ -196,17 +197,22 @@ try: """ api_request_chain: LLMChain + api_answer_chain: LLMChain + requests_wrapper: TextRequestsWrapper = Field(exclude=True) + api_docs: str - question_key: str = "question" #: :meta private: - output_key: str = "output" #: :meta private: + + question_key: str = "question" + + output_key: str = "output" + limit_to_domains: Sequence[str] | None = Field(default_factory=list) """Use to limit the domains that can be accessed by the API chain. * For example, to limit to just the domain `https://www.example.com`, set `limit_to_domains=["https://www.example.com"]`. - * The default value is an empty tuple, which means that no domains are allowed by default. By design this will raise an error on instantiation. * Use a None if you want to allow all domains by default -- this is not @@ -217,18 +223,12 @@ try: @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.question_key] @property def output_keys(self) -> list[str]: - """Expect output key. - - :meta private: - """ + """Expect output key.""" return [self.output_key] @model_validator(mode="after") diff --git a/libs/langchain/langchain_classic/chains/base.py b/libs/langchain/langchain_classic/chains/base.py index 2d1c0b1e028..c91ccf6c688 100644 --- a/libs/langchain/langchain_classic/chains/base.py +++ b/libs/langchain/langchain_classic/chains/base.py @@ -20,7 +20,6 @@ from langchain_core.callbacks import ( CallbackManagerForChainRun, Callbacks, ) -from langchain_core.memory import BaseMemory from langchain_core.outputs import RunInfo from langchain_core.runnables import ( RunnableConfig, @@ -38,6 +37,7 @@ from pydantic import ( ) from typing_extensions import override +from langchain_classic.base_memory import BaseMemory from langchain_classic.schema import RUN_KEY logger = logging.getLogger(__name__) @@ -73,14 +73,14 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC): """ memory: BaseMemory | None = None - """Optional memory object. Defaults to `None`. + """Optional memory object. Memory is a class that gets called at the start and at the end of every chain. At the start, memory loads variables and passes them along in the chain. At the end, it saves any returned variables. There are many different types of memory - please see memory docs for the full catalog.""" callbacks: Callbacks = Field(default=None, exclude=True) - """Optional list of callback handlers (or callback manager). Defaults to `None`. + """Optional list of callback handlers (or callback manager). 
Callback handlers are called throughout the lifecycle of a call to a chain, starting with on_chain_start, ending with on_chain_end or on_chain_error. Each custom chain can optionally call additional callback methods, see Callback docs @@ -90,13 +90,13 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC): will be printed to the console. Defaults to the global `verbose` value, accessible via `langchain.globals.get_verbose()`.""" tags: list[str] | None = None - """Optional list of tags associated with the chain. Defaults to `None`. + """Optional list of tags associated with the chain. These tags will be associated with each call to this chain, and passed as arguments to the handlers defined in `callbacks`. You can use these to eg identify a specific instance of a chain with its use case. """ - metadata: dict[str, Any] | None = None - """Optional metadata associated with the chain. Defaults to `None`. + metadata: builtins.dict[str, Any] | None = None + """Optional metadata associated with the chain. This metadata will be associated with each call to this chain, and passed as arguments to the handlers defined in `callbacks`. You can use these to eg identify a specific instance of a chain with its use case. @@ -317,9 +317,9 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC): @abstractmethod def _call( self, - inputs: dict[str, Any], + inputs: builtins.dict[str, Any], run_manager: CallbackManagerForChainRun | None = None, - ) -> dict[str, Any]: + ) -> builtins.dict[str, Any]: """Execute the chain. This is a private method that is not user-facing. It is only called within @@ -339,9 +339,9 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC): async def _acall( self, - inputs: dict[str, Any], + inputs: builtins.dict[str, Any], run_manager: AsyncCallbackManagerForChainRun | None = None, - ) -> dict[str, Any]: + ) -> builtins.dict[str, Any]: """Asynchronously execute the chain. This is a private method that is not user-facing. It is only called within @@ -387,14 +387,14 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC): return_only_outputs: Whether to return only outputs in the response. If `True`, only new keys generated by this chain will be returned. If `False`, both input keys and new keys generated by this - chain will be returned. Defaults to `False`. + chain will be returned. callbacks: Callbacks to use for this chain run. These will be called in addition to callbacks passed to the chain during construction, but only these runtime callbacks will propagate to calls to other objects. tags: List of string tags to pass to all callbacks. These will be passed in addition to tags passed to the chain during construction, but only these runtime tags will propagate to calls to other objects. - metadata: Optional metadata associated with the chain. Defaults to `None`. + metadata: Optional metadata associated with the chain. run_name: Optional name for this run of the chain. include_run_info: Whether to include run info in the response. Defaults to False. @@ -439,14 +439,14 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC): return_only_outputs: Whether to return only outputs in the response. If `True`, only new keys generated by this chain will be returned. If `False`, both input keys and new keys generated by this - chain will be returned. Defaults to `False`. + chain will be returned. callbacks: Callbacks to use for this chain run. 
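The `tags` and `metadata` fields documented above can be set at construction time and supplemented per run through the `config` argument. A short sketch using `LLMChain` as a stand-in chain, not part of this patch; assumes `langchain_openai` is installed:

```python
# Sketch only; the chain, tags, and metadata values are placeholders.
from langchain_classic.chains import LLMChain
from langchain_core.prompts import PromptTemplate
from langchain_openai import OpenAI

prompt = PromptTemplate.from_template("Tell me a {adjective} joke")
chain = LLMChain(
    llm=OpenAI(),
    prompt=prompt,
    tags=["jokes"],  # attached to every run of this chain
    metadata={"owner": "docs-example"},  # likewise attached to every run
)
chain.invoke(
    {"adjective": "dry"},
    # Runtime tags/metadata are passed alongside the construction-time ones.
    config={"tags": ["ad-hoc"], "metadata": {"request_id": "example-123"}},
)
```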
These will be called in addition to callbacks passed to the chain during construction, but only these runtime callbacks will propagate to calls to other objects. tags: List of string tags to pass to all callbacks. These will be passed in addition to tags passed to the chain during construction, but only these runtime tags will propagate to calls to other objects. - metadata: Optional metadata associated with the chain. Defaults to `None`. + metadata: Optional metadata associated with the chain. run_name: Optional name for this run of the chain. include_run_info: Whether to include run info in the response. Defaults to False. diff --git a/libs/langchain/langchain_classic/chains/combine_documents/base.py b/libs/langchain/langchain_classic/chains/combine_documents/base.py index ee672ea1001..96b6fdf46ed 100644 --- a/libs/langchain/langchain_classic/chains/combine_documents/base.py +++ b/libs/langchain/langchain_classic/chains/combine_documents/base.py @@ -44,8 +44,8 @@ class BaseCombineDocumentsChain(Chain, ABC): that will be longer than the context length). """ - input_key: str = "input_documents" #: :meta private: - output_key: str = "output_text" #: :meta private: + input_key: str = "input_documents" + output_key: str = "output_text" @override def get_input_schema( @@ -69,18 +69,12 @@ class BaseCombineDocumentsChain(Chain, ABC): @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return output key. - - :meta private: - """ + """Return output key.""" return [self.output_key] def prompt_length(self, docs: list[Document], **kwargs: Any) -> int | None: # noqa: ARG002 @@ -234,24 +228,18 @@ class AnalyzeDocumentChain(Chain): ``` """ - input_key: str = "input_document" #: :meta private: + input_key: str = "input_document" text_splitter: TextSplitter = Field(default_factory=RecursiveCharacterTextSplitter) combine_docs_chain: BaseCombineDocumentsChain @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return output key. - - :meta private: - """ + """Return output key.""" return self.combine_docs_chain.output_keys @override diff --git a/libs/langchain/langchain_classic/chains/combine_documents/map_reduce.py b/libs/langchain/langchain_classic/chains/combine_documents/map_reduce.py index 99fdf7bc1be..4c5cf66913b 100644 --- a/libs/langchain/langchain_classic/chains/combine_documents/map_reduce.py +++ b/libs/langchain/langchain_classic/chains/combine_documents/map_reduce.py @@ -44,7 +44,7 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain): MapReduceDocumentsChain, ) from langchain_core.prompts import PromptTemplate - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI # This controls how each document will be formatted. 
Specifically, # it will be passed to `format_document` - see that function for more @@ -53,16 +53,16 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain): input_variables=["page_content"], template="{page_content}" ) document_variable_name = "context" - llm = OpenAI() + model = OpenAI() # The prompt here should take as an input variable the # `document_variable_name` prompt = PromptTemplate.from_template("Summarize this content: {context}") - llm_chain = LLMChain(llm=llm, prompt=prompt) + llm_chain = LLMChain(llm=model, prompt=prompt) # We now define how to combine these summaries reduce_prompt = PromptTemplate.from_template( "Combine these summaries: {context}" ) - reduce_llm_chain = LLMChain(llm=llm, prompt=reduce_prompt) + reduce_llm_chain = LLMChain(llm=model, prompt=reduce_prompt) combine_documents_chain = StuffDocumentsChain( llm_chain=reduce_llm_chain, document_prompt=document_prompt, @@ -79,7 +79,7 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain): # which is specifically aimed at collapsing documents BEFORE # the final call. prompt = PromptTemplate.from_template("Collapse this content: {context}") - llm_chain = LLMChain(llm=llm, prompt=prompt) + llm_chain = LLMChain(llm=model, prompt=prompt) collapse_documents_chain = StuffDocumentsChain( llm_chain=llm_chain, document_prompt=document_prompt, @@ -125,10 +125,7 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain): @property def output_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" _output_keys = super().output_keys if self.return_intermediate_steps: _output_keys = [*_output_keys, "intermediate_steps"] diff --git a/libs/langchain/langchain_classic/chains/combine_documents/map_rerank.py b/libs/langchain/langchain_classic/chains/combine_documents/map_rerank.py index a712ab6f06b..d7889c72cf7 100644 --- a/libs/langchain/langchain_classic/chains/combine_documents/map_rerank.py +++ b/libs/langchain/langchain_classic/chains/combine_documents/map_rerank.py @@ -38,11 +38,11 @@ class MapRerankDocumentsChain(BaseCombineDocumentsChain): ```python from langchain_classic.chains import MapRerankDocumentsChain, LLMChain from langchain_core.prompts import PromptTemplate - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from langchain_classic.output_parsers.regex import RegexParser document_variable_name = "context" - llm = OpenAI() + model = OpenAI() # The prompt here should take as an input variable the # `document_variable_name` # The actual prompt will need to be a lot more complex, this is just @@ -61,7 +61,7 @@ class MapRerankDocumentsChain(BaseCombineDocumentsChain): input_variables=["context"], output_parser=output_parser, ) - llm_chain = LLMChain(llm=llm, prompt=prompt) + llm_chain = LLMChain(llm=model, prompt=prompt) chain = MapRerankDocumentsChain( llm_chain=llm_chain, document_variable_name=document_variable_name, @@ -108,10 +108,7 @@ class MapRerankDocumentsChain(BaseCombineDocumentsChain): @property def output_keys(self) -> list[str]: - """Expect input key. 
- - :meta private: - """ + """Expect input key.""" _output_keys = super().output_keys if self.return_intermediate_steps: _output_keys = [*_output_keys, "intermediate_steps"] diff --git a/libs/langchain/langchain_classic/chains/combine_documents/reduce.py b/libs/langchain/langchain_classic/chains/combine_documents/reduce.py index 0f86c389ff0..d8a09dd1344 100644 --- a/libs/langchain/langchain_classic/chains/combine_documents/reduce.py +++ b/libs/langchain/langchain_classic/chains/combine_documents/reduce.py @@ -33,17 +33,18 @@ def split_list_of_docs( token_max: int, **kwargs: Any, ) -> list[list[Document]]: - """Split Documents into subsets that each meet a cumulative length constraint. + """Split `Document` objects to subsets that each meet a cumulative len. constraint. Args: - docs: The full list of Documents. - length_func: Function for computing the cumulative length of a set of Documents. - token_max: The maximum cumulative length of any subset of Documents. + docs: The full list of `Document` objects. + length_func: Function for computing the cumulative length of a set of `Document` + objects. + token_max: The maximum cumulative length of any subset of `Document` objects. **kwargs: Arbitrary additional keyword params to pass to each call of the - length_func. + `length_func`. Returns: - A List[List[Document]]. + A `list[list[Document]]`. """ new_result_doc_list = [] _sub_result_docs = [] @@ -71,18 +72,18 @@ def collapse_docs( """Execute a collapse function on a set of documents and merge their metadatas. Args: - docs: A list of Documents to combine. - combine_document_func: A function that takes in a list of Documents and + docs: A list of `Document` objects to combine. + combine_document_func: A function that takes in a list of `Document` objects and optionally addition keyword parameters and combines them into a single string. **kwargs: Arbitrary additional keyword params to pass to the - combine_document_func. + `combine_document_func`. Returns: - A single Document with the output of combine_document_func for the page content - and the combined metadata's of all the input documents. All metadata values - are strings, and where there are overlapping keys across documents the - values are joined by ", ". + A single `Document` with the output of `combine_document_func` for the page + content and the combined metadata's of all the input documents. All metadata + values are strings, and where there are overlapping keys across documents + the values are joined by `', '`. """ result = combine_document_func(docs, **kwargs) combined_metadata = {k: str(v) for k, v in docs[0].metadata.items()} @@ -103,18 +104,18 @@ async def acollapse_docs( """Execute a collapse function on a set of documents and merge their metadatas. Args: - docs: A list of Documents to combine. - combine_document_func: A function that takes in a list of Documents and + docs: A list of `Document` objects to combine. + combine_document_func: A function that takes in a list of `Document` objects and optionally addition keyword parameters and combines them into a single string. **kwargs: Arbitrary additional keyword params to pass to the - combine_document_func. + `combine_document_func`. Returns: - A single Document with the output of combine_document_func for the page content - and the combined metadata's of all the input documents. All metadata values - are strings, and where there are overlapping keys across documents the - values are joined by ", ". 
+ A single `Document` with the output of `combine_document_func` for the page + content and the combined metadata's of all the input documents. All metadata + values are strings, and where there are overlapping keys across documents + the values are joined by `', '`. """ result = await combine_document_func(docs, **kwargs) combined_metadata = {k: str(v) for k, v in docs[0].metadata.items()} @@ -141,11 +142,11 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): This involves - - combine_documents_chain - - - collapse_documents_chain + - `combine_documents_chain` + - `collapse_documents_chain` `combine_documents_chain` is ALWAYS provided. This is final chain that is called. + We pass all previous results to this chain, and the output of this chain is returned as a final result. @@ -162,7 +163,7 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): ReduceDocumentsChain, ) from langchain_core.prompts import PromptTemplate - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI # This controls how each document will be formatted. Specifically, # it will be passed to `format_document` - see that function for more @@ -171,11 +172,11 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): input_variables=["page_content"], template="{page_content}" ) document_variable_name = "context" - llm = OpenAI() + model = OpenAI() # The prompt here should take as an input variable the # `document_variable_name` prompt = PromptTemplate.from_template("Summarize this content: {context}") - llm_chain = LLMChain(llm=llm, prompt=prompt) + llm_chain = LLMChain(llm=model, prompt=prompt) combine_documents_chain = StuffDocumentsChain( llm_chain=llm_chain, document_prompt=document_prompt, @@ -188,7 +189,7 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): # which is specifically aimed at collapsing documents BEFORE # the final call. prompt = PromptTemplate.from_template("Collapse this content: {context}") - llm_chain = LLMChain(llm=llm, prompt=prompt) + llm_chain = LLMChain(llm=model, prompt=prompt) collapse_documents_chain = StuffDocumentsChain( llm_chain=llm_chain, document_prompt=document_prompt, @@ -203,19 +204,28 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): combine_documents_chain: BaseCombineDocumentsChain """Final chain to call to combine documents. - This is typically a StuffDocumentsChain.""" + + This is typically a `StuffDocumentsChain`. + """ collapse_documents_chain: BaseCombineDocumentsChain | None = None """Chain to use to collapse documents if needed until they can all fit. - If `None`, will use the combine_documents_chain. - This is typically a StuffDocumentsChain.""" + If `None`, will use the `combine_documents_chain`. + + This is typically a `StuffDocumentsChain`. + """ token_max: int = 3000 - """The maximum number of tokens to group documents into. For example, if - set to 3000 then documents will be grouped into chunks of no greater than - 3000 tokens before trying to combine them into a smaller chunk.""" + """The maximum number of tokens to group documents into. + + For example, if set to 3000 then documents will be grouped into chunks of no greater + than 3000 tokens before trying to combine them into a smaller chunk. + """ collapse_max_retries: int | None = None - """The maximum number of retries to collapse documents to fit token_max. - If `None`, it will keep trying to collapse documents to fit token_max. 
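The `split_list_of_docs` and `collapse_docs` helpers documented above can be exercised without an LLM by substituting simple callables for the length and combine functions. A minimal sketch, not part of this patch; the character-count length function and string-join combiner are stand-ins:

```python
# Sketch only; length_func and combine stand in for a token counter and an
# LLM summarization call.
from langchain_classic.chains.combine_documents.reduce import (
    collapse_docs,
    split_list_of_docs,
)
from langchain_core.documents import Document

docs = [
    Document(page_content=f"chunk {i} " * 50, metadata={"src": str(i)})
    for i in range(6)
]


def length_func(docs: list[Document]) -> int:
    return sum(len(d.page_content) for d in docs)


def combine(docs: list[Document]) -> str:
    return " ".join(d.page_content[:40] for d in docs)


groups = split_list_of_docs(docs, length_func, token_max=1200)
collapsed = [collapse_docs(group, combine) for group in groups]
# Overlapping metadata keys are joined with ", " as described above.
print(len(groups), [d.metadata for d in collapsed])
```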
- Otherwise, after it reaches the max number, it will throw an error""" + """The maximum number of retries to collapse documents to fit `token_max`. + + If `None`, it will keep trying to collapse documents to fit `token_max`. + + Otherwise, after it reaches the max number, it will throw an error. + """ model_config = ConfigDict( arbitrary_types_allowed=True, @@ -248,7 +258,7 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): Returns: The first element returned is the single string output. The second - element returned is a dictionary of other keys to return. + element returned is a dictionary of other keys to return. """ result_docs, _ = self._collapse( docs, @@ -282,7 +292,7 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): Returns: The first element returned is the single string output. The second - element returned is a dictionary of other keys to return. + element returned is a dictionary of other keys to return. """ result_docs, _ = await self._acollapse( docs, diff --git a/libs/langchain/langchain_classic/chains/combine_documents/refine.py b/libs/langchain/langchain_classic/chains/combine_documents/refine.py index a6d694c63d1..0e66b5690d1 100644 --- a/libs/langchain/langchain_classic/chains/combine_documents/refine.py +++ b/libs/langchain/langchain_classic/chains/combine_documents/refine.py @@ -46,7 +46,7 @@ class RefineDocumentsChain(BaseCombineDocumentsChain): ```python from langchain_classic.chains import RefineDocumentsChain, LLMChain from langchain_core.prompts import PromptTemplate - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI # This controls how each document will be formatted. Specifically, # it will be passed to `format_document` - see that function for more @@ -55,11 +55,11 @@ class RefineDocumentsChain(BaseCombineDocumentsChain): input_variables=["page_content"], template="{page_content}" ) document_variable_name = "context" - llm = OpenAI() + model = OpenAI() # The prompt here should take as an input variable the # `document_variable_name` prompt = PromptTemplate.from_template("Summarize this content: {context}") - initial_llm_chain = LLMChain(llm=llm, prompt=prompt) + initial_llm_chain = LLMChain(llm=model, prompt=prompt) initial_response_name = "prev_response" # The prompt here should take as an input variable the # `document_variable_name` as well as `initial_response_name` @@ -67,7 +67,7 @@ class RefineDocumentsChain(BaseCombineDocumentsChain): "Here's your first summary: {prev_response}. " "Now add to it based on the following context: {context}" ) - refine_llm_chain = LLMChain(llm=llm, prompt=prompt_refine) + refine_llm_chain = LLMChain(llm=model, prompt=prompt_refine) chain = RefineDocumentsChain( initial_llm_chain=initial_llm_chain, refine_llm_chain=refine_llm_chain, @@ -96,10 +96,7 @@ class RefineDocumentsChain(BaseCombineDocumentsChain): @property def output_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" _output_keys = super().output_keys if self.return_intermediate_steps: _output_keys = [*_output_keys, "intermediate_steps"] diff --git a/libs/langchain/langchain_classic/chains/combine_documents/stuff.py b/libs/langchain/langchain_classic/chains/combine_documents/stuff.py index 5e05d01114e..0556154fe1b 100644 --- a/libs/langchain/langchain_classic/chains/combine_documents/stuff.py +++ b/libs/langchain/langchain_classic/chains/combine_documents/stuff.py @@ -35,10 +35,10 @@ def create_stuff_documents_chain( Args: llm: Language model. - prompt: Prompt template. 
Must contain input variable "context" (override by + prompt: Prompt template. Must contain input variable `"context"` (override by setting document_variable), which will be used for passing in the formatted documents. - output_parser: Output parser. Defaults to StrOutputParser. + output_parser: Output parser. Defaults to `StrOutputParser`. document_prompt: Prompt used for formatting each document into a string. Input variables can be "page_content" or any metadata keys that are in all documents. "page_content" will automatically retrieve the @@ -47,18 +47,18 @@ def create_stuff_documents_chain( a prompt that only contains `Document.page_content`. document_separator: String separator to use between formatted document strings. document_variable_name: Variable name to use for the formatted documents in the - prompt. Defaults to "context". + prompt. Defaults to `"context"`. Returns: - An LCEL Runnable. The input is a dictionary that must have a "context" key that - maps to a List[Document], and any other input variables expected in the prompt. - The Runnable return type depends on output_parser used. + An LCEL Runnable. The input is a dictionary that must have a `"context"` key + that maps to a `list[Document]`, and any other input variables expected in the + prompt. The `Runnable` return type depends on `output_parser` used. Example: ```python - # pip install -U langchain langchain-community + # pip install -U langchain langchain-openai - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_core.documents import Document from langchain_core.prompts import ChatPromptTemplate from langchain_classic.chains.combine_documents import ( @@ -68,8 +68,8 @@ def create_stuff_documents_chain( prompt = ChatPromptTemplate.from_messages( [("system", "What are everyone's favorite colors:\n\n{context}")] ) - llm = ChatOpenAI(model="gpt-3.5-turbo") - chain = create_stuff_documents_chain(llm, prompt) + model = ChatOpenAI(model="gpt-3.5-turbo") + chain = create_stuff_documents_chain(model, prompt) docs = [ Document(page_content="Jesse loves red but not yellow"), @@ -123,7 +123,7 @@ class StuffDocumentsChain(BaseCombineDocumentsChain): ```python from langchain_classic.chains import StuffDocumentsChain, LLMChain from langchain_core.prompts import PromptTemplate - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI # This controls how each document will be formatted. 
Specifically, # it will be passed to `format_document` - see that function for more @@ -132,11 +132,11 @@ input_variables=["page_content"], template="{page_content}" ) document_variable_name = "context" - llm = OpenAI() + model = OpenAI() # The prompt here should take as an input variable the # `document_variable_name` prompt = PromptTemplate.from_template("Summarize this content: {context}") - llm_chain = LLMChain(llm=llm, prompt=prompt) + llm_chain = LLMChain(llm=model, prompt=prompt) chain = StuffDocumentsChain( llm_chain=llm_chain, document_prompt=document_prompt, diff --git a/libs/langchain/langchain_classic/chains/constitutional_ai/base.py b/libs/langchain/langchain_classic/chains/constitutional_ai/base.py index 72bbcbb9323..4eaa74269bc 100644 --- a/libs/langchain/langchain_classic/chains/constitutional_ai/base.py +++ b/libs/langchain/langchain_classic/chains/constitutional_ai/base.py @@ -58,7 +58,7 @@ class ConstitutionalChain(Chain): from langgraph.graph import END, START, StateGraph from typing_extensions import Annotated, TypedDict - llm = ChatOpenAI(model="gpt-4o-mini") + model = ChatOpenAI(model="gpt-4o-mini") class Critique(TypedDict): """Generate a critique, if needed.""" @@ -86,9 +86,9 @@ class ConstitutionalChain(Chain): "Revision Request: {revision_request}" ) - chain = llm | StrOutputParser() - critique_chain = critique_prompt | llm.with_structured_output(Critique) - revision_chain = revision_prompt | llm | StrOutputParser() + chain = model | StrOutputParser() + critique_chain = critique_prompt | model.with_structured_output(Critique) + revision_chain = revision_prompt | model | StrOutputParser() class State(TypedDict): @@ -165,21 +165,21 @@ class ConstitutionalChain(Chain): Example: ```python - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from langchain_classic.chains import LLMChain, ConstitutionalChain from langchain_classic.chains.constitutional_ai.models \ import ConstitutionalPrinciple - llm = OpenAI() + model = OpenAI() qa_prompt = PromptTemplate( template="Q: {question} A:", input_variables=["question"], ) - qa_chain = LLMChain(llm=llm, prompt=qa_prompt) + qa_chain = LLMChain(llm=model, prompt=qa_prompt) constitutional_chain = ConstitutionalChain.from_llm( - llm=llm, + llm=model, chain=qa_chain, constitutional_principles=[ ConstitutionalPrinciple( diff --git a/libs/langchain/langchain_classic/chains/conversation/base.py b/libs/langchain/langchain_classic/chains/conversation/base.py index 07c1c98ee08..98a73a97299 100644 --- a/libs/langchain/langchain_classic/chains/conversation/base.py +++ b/libs/langchain/langchain_classic/chains/conversation/base.py @@ -1,11 +1,11 @@ """Chain that carries on a conversation and calls an LLM.""" from langchain_core._api import deprecated -from langchain_core.memory import BaseMemory from langchain_core.prompts import BasePromptTemplate from pydantic import ConfigDict, Field, model_validator from typing_extensions import Self, override +from langchain_classic.base_memory import BaseMemory from langchain_classic.chains.conversation.prompt import PROMPT from langchain_classic.chains.llm import LLMChain from langchain_classic.memory.buffer import ConversationBufferMemory @@ -47,9 +47,9 @@ class ConversationChain(LLMChain): return store[session_id] - llm = ChatOpenAI(model="gpt-3.5-turbo-0125") + model = ChatOpenAI(model="gpt-3.5-turbo-0125") - chain = RunnableWithMessageHistory(llm, get_session_history) + chain =
RunnableWithMessageHistory(model, get_session_history) chain.invoke( "Hi I'm Bob.", config={"configurable": {"session_id": "1"}}, @@ -85,9 +85,9 @@ class ConversationChain(LLMChain): return store[session_id] - llm = ChatOpenAI(model="gpt-3.5-turbo-0125") + model = ChatOpenAI(model="gpt-3.5-turbo-0125") - chain = RunnableWithMessageHistory(llm, get_session_history) + chain = RunnableWithMessageHistory(model, get_session_history) chain.invoke( "Hi I'm Bob.", config={"configurable": {"session_id": "1"}}, @@ -97,7 +97,7 @@ class ConversationChain(LLMChain): Example: ```python from langchain_classic.chains import ConversationChain - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI conversation = ConversationChain(llm=OpenAI()) ``` @@ -108,8 +108,8 @@ class ConversationChain(LLMChain): prompt: BasePromptTemplate = PROMPT """Default conversation prompt to use.""" - input_key: str = "input" #: :meta private: - output_key: str = "response" #: :meta private: + input_key: str = "input" + output_key: str = "response" model_config = ConfigDict( arbitrary_types_allowed=True, diff --git a/libs/langchain/langchain_classic/chains/conversational_retrieval/base.py b/libs/langchain/langchain_classic/chains/conversational_retrieval/base.py index d1424619ec4..db8bd9daf31 100644 --- a/libs/langchain/langchain_classic/chains/conversational_retrieval/base.py +++ b/libs/langchain/langchain_classic/chains/conversational_retrieval/base.py @@ -122,10 +122,7 @@ class BaseConversationalRetrievalChain(Chain): @property def output_keys(self) -> list[str]: - """Return the output keys. - - :meta private: - """ + """Return the output keys.""" _output_keys = [self.output_key] if self.return_source_documents: _output_keys = [*_output_keys, "source_documents"] @@ -283,7 +280,7 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain): retriever = ... # Your retriever - llm = ChatOpenAI() + model = ChatOpenAI() # Contextualize question contextualize_q_system_prompt = ( @@ -301,7 +298,7 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain): ] ) history_aware_retriever = create_history_aware_retriever( - llm, retriever, contextualize_q_prompt + model, retriever, contextualize_q_prompt ) # Answer question @@ -324,7 +321,7 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain): # Below we use create_stuff_documents_chain to feed all retrieved context # into the LLM. Note that we can also use StuffDocumentsChain and other # instances of BaseCombineDocumentsChain. - question_answer_chain = create_stuff_documents_chain(llm, qa_prompt) + question_answer_chain = create_stuff_documents_chain(model, qa_prompt) rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain) # Usage: @@ -337,17 +334,17 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain): The algorithm for this chain consists of three parts: 1. Use the chat history and the new question to create a "standalone question". - This is done so that this question can be passed into the retrieval step to fetch - relevant documents. If only the new question was passed in, then relevant context - may be lacking. If the whole conversation was passed into retrieval, there may - be unnecessary information there that would distract from retrieval. + This is done so that this question can be passed into the retrieval step to + fetch relevant documents. If only the new question was passed in, then relevant + context may be lacking. 
If the whole conversation was passed into retrieval, + there may be unnecessary information there that would distract from retrieval. 2. This new question is passed to the retriever and relevant documents are - returned. + returned. 3. The retrieved documents are passed to an LLM along with either the new question - (default behavior) or the original question and chat history to generate a final - response. + (default behavior) or the original question and chat history to generate a final + response. Example: ```python @@ -357,7 +354,7 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain): ConversationalRetrievalChain, ) from langchain_core.prompts import PromptTemplate - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI combine_docs_chain = StuffDocumentsChain(...) vectorstore = ... @@ -371,8 +368,8 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain): "Follow up question: {question}" ) prompt = PromptTemplate.from_template(template) - llm = OpenAI() - question_generator_chain = LLMChain(llm=llm, prompt=prompt) + model = OpenAI() + question_generator_chain = LLMChain(llm=model, prompt=prompt) chain = ConversationalRetrievalChain( combine_docs_chain=combine_docs_chain, retriever=retriever, diff --git a/libs/langchain/langchain_classic/chains/elasticsearch_database/base.py b/libs/langchain/langchain_classic/chains/elasticsearch_database/base.py index 589cccbb832..e041de38351 100644 --- a/libs/langchain/langchain_classic/chains/elasticsearch_database/base.py +++ b/libs/langchain/langchain_classic/chains/elasticsearch_database/base.py @@ -31,7 +31,7 @@ class ElasticsearchDatabaseChain(Chain): Example: ```python from langchain_classic.chains import ElasticsearchDatabaseChain - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from elasticsearch import Elasticsearch database = Elasticsearch("http://localhost:9200") @@ -49,8 +49,8 @@ class ElasticsearchDatabaseChain(Chain): """Number of results to return from the query""" ignore_indices: list[str] | None = None include_indices: list[str] | None = None - input_key: str = "question" #: :meta private: - output_key: str = "result" #: :meta private: + input_key: str = "question" + output_key: str = "result" sample_documents_in_index_info: int = 3 return_intermediate_steps: bool = False """Whether or not to return the intermediate steps along with the final answer.""" @@ -69,18 +69,12 @@ class ElasticsearchDatabaseChain(Chain): @property def input_keys(self) -> list[str]: - """Return the singular input key. - - :meta private: - """ + """Return the singular input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return the singular output key. - - :meta private: - """ + """Return the singular output key.""" if not self.return_intermediate_steps: return [self.output_key] return [self.output_key, INTERMEDIATE_STEPS_KEY] @@ -198,7 +192,7 @@ class ElasticsearchDatabaseChain(Chain): query_prompt: The prompt to use for query construction. answer_prompt: The prompt to use for answering user question given data. query_output_parser: The output parser to use for parsing model-generated - ES query. Defaults to SimpleJsonOutputParser. + ES query. Defaults to `SimpleJsonOutputParser`. kwargs: Additional arguments to pass to the constructor. 
""" query_prompt = query_prompt or DSL_PROMPT diff --git a/libs/langchain/langchain_classic/chains/history_aware_retriever.py b/libs/langchain/langchain_classic/chains/history_aware_retriever.py index ed16c85560b..0926cdeaedf 100644 --- a/libs/langchain/langchain_classic/chains/history_aware_retriever.py +++ b/libs/langchain/langchain_classic/chains/history_aware_retriever.py @@ -20,28 +20,28 @@ def create_history_aware_retriever( Args: llm: Language model to use for generating a search term given chat history - retriever: RetrieverLike object that takes a string as input and outputs - a list of Documents. + retriever: `RetrieverLike` object that takes a string as input and outputs + a list of `Document` objects. prompt: The prompt used to generate the search query for the retriever. Returns: An LCEL Runnable. The runnable input must take in `input`, and if there is chat history should take it in the form of `chat_history`. - The Runnable output is a list of Documents + The `Runnable` output is a list of `Document` objects Example: ```python # pip install -U langchain langchain-community - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_classic.chains import create_history_aware_retriever from langchain_classic import hub rephrase_prompt = hub.pull("langchain-ai/chat-langchain-rephrase") - llm = ChatOpenAI() + model = ChatOpenAI() retriever = ... chat_retriever_chain = create_history_aware_retriever( - llm, retriever, rephrase_prompt + model, retriever, rephrase_prompt ) chain.invoke({"input": "...", "chat_history": }) diff --git a/libs/langchain/langchain_classic/chains/llm.py b/libs/langchain/langchain_classic/chains/llm.py index 534cd97794c..274fd1d0d83 100644 --- a/libs/langchain/langchain_classic/chains/llm.py +++ b/libs/langchain/langchain_classic/chains/llm.py @@ -55,8 +55,8 @@ class LLMChain(Chain): prompt_template = "Tell me a {adjective} joke" prompt = PromptTemplate(input_variables=["adjective"], template=prompt_template) - llm = OpenAI() - chain = prompt | llm | StrOutputParser() + model = OpenAI() + chain = prompt | model | StrOutputParser() chain.invoke("your adjective here") ``` @@ -64,12 +64,12 @@ class LLMChain(Chain): Example: ```python from langchain_classic.chains import LLMChain - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from langchain_core.prompts import PromptTemplate prompt_template = "Tell me a {adjective} joke" prompt = PromptTemplate(input_variables=["adjective"], template=prompt_template) - llm = LLMChain(llm=OpenAI(), prompt=prompt) + model = LLMChain(llm=OpenAI(), prompt=prompt) ``` """ @@ -82,14 +82,14 @@ class LLMChain(Chain): """Prompt object to use.""" llm: Runnable[LanguageModelInput, str] | Runnable[LanguageModelInput, BaseMessage] """Language model to call.""" - output_key: str = "text" #: :meta private: + output_key: str = "text" output_parser: BaseLLMOutputParser = Field(default_factory=StrOutputParser) """Output parser to use. Defaults to one that takes the most likely string but does not change it otherwise.""" return_final_only: bool = True - """Whether to return only the final parsed result. Defaults to `True`. - If false, will return a bunch of extra information about the generation.""" + """Whether to return only the final parsed result. 
+ If `False`, will return a bunch of extra information about the generation.""" llm_kwargs: dict = Field(default_factory=dict) model_config = ConfigDict( @@ -99,18 +99,12 @@ class LLMChain(Chain): @property def input_keys(self) -> list[str]: - """Will be whatever keys the prompt expects. - - :meta private: - """ + """Will be whatever keys the prompt expects.""" return self.prompt.input_variables @property def output_keys(self) -> list[str]: - """Will always return text key. - - :meta private: - """ + """Will always return text key.""" if self.return_final_only: return [self.output_key] return [self.output_key, "full_generation"] diff --git a/libs/langchain/langchain_classic/chains/llm_checker/base.py b/libs/langchain/langchain_classic/chains/llm_checker/base.py index 5bd53322e9c..9db3194fb3d 100644 --- a/libs/langchain/langchain_classic/chains/llm_checker/base.py +++ b/libs/langchain/langchain_classic/chains/llm_checker/base.py @@ -77,11 +77,11 @@ class LLMCheckerChain(Chain): Example: ```python - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from langchain_classic.chains import LLMCheckerChain - llm = OpenAI(temperature=0.7) - checker_chain = LLMCheckerChain.from_llm(llm) + model = OpenAI(temperature=0.7) + checker_chain = LLMCheckerChain.from_llm(model) ``` """ @@ -97,8 +97,8 @@ class LLMCheckerChain(Chain): """[Deprecated]""" revised_answer_prompt: PromptTemplate = REVISED_ANSWER_PROMPT """[Deprecated] Prompt to use when questioning the documents.""" - input_key: str = "query" #: :meta private: - output_key: str = "result" #: :meta private: + input_key: str = "query" + output_key: str = "result" model_config = ConfigDict( arbitrary_types_allowed=True, @@ -138,18 +138,12 @@ class LLMCheckerChain(Chain): @property def input_keys(self) -> list[str]: - """Return the singular input key. - - :meta private: - """ + """Return the singular input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return the singular output key. 
- - :meta private: - """ + """Return the singular output key.""" return [self.output_key] def _call( diff --git a/libs/langchain/langchain_classic/chains/llm_math/base.py b/libs/langchain/langchain_classic/chains/llm_math/base.py index 86eea13e1b7..b8b038d5201 100644 --- a/libs/langchain/langchain_classic/chains/llm_math/base.py +++ b/libs/langchain/langchain_classic/chains/llm_math/base.py @@ -84,9 +84,9 @@ class LLMMathChain(Chain): ) ) - llm = ChatOpenAI(model="gpt-4o-mini", temperature=0) + model = ChatOpenAI(model="gpt-4o-mini", temperature=0) tools = [calculator] - llm_with_tools = llm.bind_tools(tools, tool_choice="any") + model_with_tools = model.bind_tools(tools, tool_choice="any") class ChainState(TypedDict): \"\"\"LangGraph state.\"\"\" @@ -95,11 +95,11 @@ class LLMMathChain(Chain): async def acall_chain(state: ChainState, config: RunnableConfig): last_message = state["messages"][-1] - response = await llm_with_tools.ainvoke(state["messages"], config) + response = await model_with_tools.ainvoke(state["messages"], config) return {"messages": [response]} async def acall_model(state: ChainState, config: RunnableConfig): - response = await llm.ainvoke(state["messages"], config) + response = await model.ainvoke(state["messages"], config) return {"messages": [response]} graph_builder = StateGraph(ChainState) @@ -145,7 +145,7 @@ class LLMMathChain(Chain): Example: ```python from langchain_classic.chains import LLMMathChain - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI llm_math = LLMMathChain.from_llm(OpenAI()) ``` @@ -156,8 +156,8 @@ class LLMMathChain(Chain): """[Deprecated] LLM wrapper to use.""" prompt: BasePromptTemplate = PROMPT """[Deprecated] Prompt to use to translate to python if necessary.""" - input_key: str = "question" #: :meta private: - output_key: str = "answer" #: :meta private: + input_key: str = "question" + output_key: str = "answer" model_config = ConfigDict( arbitrary_types_allowed=True, @@ -189,18 +189,12 @@ class LLMMathChain(Chain): @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Expect output key. - - :meta private: - """ + """Expect output key.""" return [self.output_key] def _evaluate_expression(self, expression: str) -> str: diff --git a/libs/langchain/langchain_classic/chains/llm_summarization_checker/base.py b/libs/langchain/langchain_classic/chains/llm_summarization_checker/base.py index 28e745772a7..e5d86dd2f03 100644 --- a/libs/langchain/langchain_classic/chains/llm_summarization_checker/base.py +++ b/libs/langchain/langchain_classic/chains/llm_summarization_checker/base.py @@ -80,11 +80,11 @@ class LLMSummarizationCheckerChain(Chain): Example: ```python - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from langchain_classic.chains import LLMSummarizationCheckerChain - llm = OpenAI(temperature=0.0) - checker_chain = LLMSummarizationCheckerChain.from_llm(llm) + model = OpenAI(temperature=0.0) + checker_chain = LLMSummarizationCheckerChain.from_llm(model) ``` """ @@ -101,8 +101,8 @@ class LLMSummarizationCheckerChain(Chain): are_all_true_prompt: PromptTemplate = ARE_ALL_TRUE_PROMPT """[Deprecated]""" - input_key: str = "query" #: :meta private: - output_key: str = "result" #: :meta private: + input_key: str = "query" + output_key: str = "result" max_checks: int = 2 """Maximum number of times to check the assertions. 
Default to double-checking.""" @@ -134,18 +134,12 @@ class LLMSummarizationCheckerChain(Chain): @property def input_keys(self) -> list[str]: - """Return the singular input key. - - :meta private: - """ + """Return the singular input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return the singular output key. - - :meta private: - """ + """Return the singular output key.""" return [self.output_key] def _call( diff --git a/libs/langchain/langchain_classic/chains/mapreduce.py b/libs/langchain/langchain_classic/chains/mapreduce.py index 81dc9731906..1845439f7a6 100644 --- a/libs/langchain/langchain_classic/chains/mapreduce.py +++ b/libs/langchain/langchain_classic/chains/mapreduce.py @@ -44,8 +44,8 @@ class MapReduceChain(Chain): """Chain to use to combine documents.""" text_splitter: TextSplitter """Text splitter to use.""" - input_key: str = "input_text" #: :meta private: - output_key: str = "output_text" #: :meta private: + input_key: str = "input_text" + output_key: str = "output_text" @classmethod def from_params( @@ -88,18 +88,12 @@ class MapReduceChain(Chain): @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return output key. - - :meta private: - """ + """Return output key.""" return [self.output_key] def _call( diff --git a/libs/langchain/langchain_classic/chains/moderation.py b/libs/langchain/langchain_classic/chains/moderation.py index 1a7e9aa5002..f4c80e4cdb3 100644 --- a/libs/langchain/langchain_classic/chains/moderation.py +++ b/libs/langchain/langchain_classic/chains/moderation.py @@ -30,14 +30,14 @@ class OpenAIModerationChain(Chain): ``` """ - client: Any = None #: :meta private: - async_client: Any = None #: :meta private: + client: Any = None + async_client: Any = None model_name: str | None = None """Moderation model name to use.""" error: bool = False """Whether or not to error if bad content was found.""" - input_key: str = "input" #: :meta private: - output_key: str = "output" #: :meta private: + input_key: str = "input" + output_key: str = "output" openai_api_key: str | None = None openai_organization: str | None = None openai_pre_1_0: bool = Field(default=False) @@ -84,18 +84,12 @@ class OpenAIModerationChain(Chain): @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return output key. - - :meta private: - """ + """Return output key.""" return [self.output_key] def _moderate(self, text: str, results: Any) -> str: diff --git a/libs/langchain/langchain_classic/chains/natbot/base.py b/libs/langchain/langchain_classic/chains/natbot/base.py index 6c482bccd86..d36033525c3 100644 --- a/libs/langchain/langchain_classic/chains/natbot/base.py +++ b/libs/langchain/langchain_classic/chains/natbot/base.py @@ -40,7 +40,7 @@ class NatBotChain(Chain): access and use this chain, and isolate the network access of the server that hosts this chain. - See https://python.langchain.com/docs/security for more information. + See https://docs.langchain.com/oss/python/security-policy for more information. 
Example: ```python @@ -55,10 +55,10 @@ class NatBotChain(Chain): """Objective that NatBot is tasked with completing.""" llm: BaseLanguageModel | None = None """[Deprecated] LLM wrapper to use.""" - input_url_key: str = "url" #: :meta private: - input_browser_content_key: str = "browser_content" #: :meta private: - previous_command: str = "" #: :meta private: - output_key: str = "command" #: :meta private: + input_url_key: str = "url" + input_browser_content_key: str = "browser_content" + previous_command: str = "" + output_key: str = "command" model_config = ConfigDict( arbitrary_types_allowed=True, @@ -84,8 +84,8 @@ class NatBotChain(Chain): """Load with default LLMChain.""" msg = ( "This method is no longer implemented. Please use from_llm." - "llm = OpenAI(temperature=0.5, best_of=10, n=3, max_tokens=50)" - "For example, NatBotChain.from_llm(llm, objective)" + "model = OpenAI(temperature=0.5, best_of=10, n=3, max_tokens=50)" + "For example, NatBotChain.from_llm(model, objective)" ) raise NotImplementedError(msg) @@ -102,18 +102,12 @@ class NatBotChain(Chain): @property def input_keys(self) -> list[str]: - """Expect url and browser content. - - :meta private: - """ + """Expect url and browser content.""" return [self.input_url_key, self.input_browser_content_key] @property def output_keys(self) -> list[str]: - """Return command. - - :meta private: - """ + """Return command.""" return [self.output_key] def _call( diff --git a/libs/langchain/langchain_classic/chains/natbot/crawler.py b/libs/langchain/langchain_classic/chains/natbot/crawler.py index 692d90fad39..037977038ce 100644 --- a/libs/langchain/langchain_classic/chains/natbot/crawler.py +++ b/libs/langchain/langchain_classic/chains/natbot/crawler.py @@ -58,7 +58,7 @@ class Crawler: Make sure to scope permissions to the minimal permissions necessary for the application. - See https://python.langchain.com/docs/security for more information. + See https://docs.langchain.com/oss/python/security-policy for more information. 
""" def __init__(self) -> None: diff --git a/libs/langchain/langchain_classic/chains/openai_functions/base.py b/libs/langchain/langchain_classic/chains/openai_functions/base.py index 417ae1e2a64..0a3e89fc304 100644 --- a/libs/langchain/langchain_classic/chains/openai_functions/base.py +++ b/libs/langchain/langchain_classic/chains/openai_functions/base.py @@ -85,7 +85,7 @@ def create_openai_fn_chain( from typing import Optional from langchain_classic.chains.openai_functions import create_openai_fn_chain - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate from pydantic import BaseModel, Field @@ -107,7 +107,7 @@ def create_openai_fn_chain( fav_food: str | None = Field(None, description="The dog's favorite food") - llm = ChatOpenAI(model="gpt-4", temperature=0) + model = ChatOpenAI(model="gpt-4", temperature=0) prompt = ChatPromptTemplate.from_messages( [ ("system", "You are a world class algorithm for recording entities."), @@ -115,7 +115,7 @@ def create_openai_fn_chain( ("human", "Tip: Make sure to answer in the correct format"), ] ) - chain = create_openai_fn_chain([RecordPerson, RecordDog], llm, prompt) + chain = create_openai_fn_chain([RecordPerson, RecordDog], model, prompt) chain.run("Harry was a chubby brown beagle who loved chicken") # -> RecordDog(name="Harry", color="brown", fav_food="chicken") @@ -179,7 +179,7 @@ def create_structured_output_chain( from typing import Optional from langchain_classic.chains.openai_functions import create_structured_output_chain - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate from pydantic import BaseModel, Field @@ -191,7 +191,7 @@ def create_structured_output_chain( color: str = Field(..., description="The dog's color") fav_food: str | None = Field(None, description="The dog's favorite food") - llm = ChatOpenAI(model="gpt-3.5-turbo-0613", temperature=0) + model = ChatOpenAI(model="gpt-3.5-turbo-0613", temperature=0) prompt = ChatPromptTemplate.from_messages( [ ("system", "You are a world class algorithm for extracting information in structured formats."), @@ -199,7 +199,7 @@ def create_structured_output_chain( ("human", "Tip: Make sure to answer in the correct format"), ] ) - chain = create_structured_output_chain(Dog, llm, prompt) + chain = create_structured_output_chain(Dog, model, prompt) chain.run("Harry was a chubby brown beagle who loved chicken") # -> Dog(name="Harry", color="brown", fav_food="chicken") diff --git a/libs/langchain/langchain_classic/chains/openai_functions/citation_fuzzy_match.py b/libs/langchain/langchain_classic/chains/openai_functions/citation_fuzzy_match.py index 0a8cc3bed02..4b3fd369d39 100644 --- a/libs/langchain/langchain_classic/chains/openai_functions/citation_fuzzy_match.py +++ b/libs/langchain/langchain_classic/chains/openai_functions/citation_fuzzy_match.py @@ -83,12 +83,12 @@ def create_citation_fuzzy_match_runnable(llm: BaseChatModel) -> Runnable: from langchain_classic.chains import create_citation_fuzzy_match_runnable from langchain_openai import ChatOpenAI - llm = ChatOpenAI(model="gpt-4o-mini") + model = ChatOpenAI(model="gpt-4o-mini") context = "Alice has blue eyes. Bob has brown eyes. Charlie has green eyes." question = "What color are Bob's eyes?" 
- chain = create_citation_fuzzy_match_runnable(llm) + chain = create_citation_fuzzy_match_runnable(model) chain.invoke({"question": question, "context": context}) ``` diff --git a/libs/langchain/langchain_classic/chains/openai_functions/extraction.py b/libs/langchain/langchain_classic/chains/openai_functions/extraction.py index 1230bc769f7..467ec638708 100644 --- a/libs/langchain/langchain_classic/chains/openai_functions/extraction.py +++ b/libs/langchain/langchain_classic/chains/openai_functions/extraction.py @@ -50,13 +50,7 @@ Passage: "LangChain has introduced a method called `with_structured_output` that" "is available on ChatModels capable of tool calling." "You can read more about the method here: " - ". " - "Please follow our extraction use case documentation for more guidelines" - "on how to do information extraction with LLMs." - ". " - "If you notice other issues, please provide " - "feedback here:" - "" + "." ), removal="1.0", alternative=( @@ -69,12 +63,12 @@ Passage: punchline: str = Field(description="The punchline to the joke") # Or any other chat model that supports tools. - # Please reference to to the documentation of structured_output + # Please reference to the documentation of structured_output # to see an up to date list of which models support # with_structured_output. - model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0) - structured_llm = model.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats. + model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0) + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats. Make sure to call the Joke function.") """ ), @@ -94,8 +88,7 @@ def create_extraction_chain( prompt: The prompt to use for extraction. tags: Optional list of tags to associate with the chain. verbose: Whether to run in verbose mode. In verbose mode, some intermediate - logs will be printed to the console. Defaults to the global `verbose` value, - accessible via `langchain.globals.get_verbose()`. + logs will be printed to the console. Returns: Chain that can be used to extract information from a passage. @@ -120,7 +113,7 @@ def create_extraction_chain( "LangChain has introduced a method called `with_structured_output` that" "is available on ChatModels capable of tool calling." "You can read more about the method here: " - ". " + ". " "Please follow our extraction use case documentation for more guidelines" "on how to do information extraction with LLMs." ". " @@ -139,12 +132,12 @@ def create_extraction_chain( punchline: str = Field(description="The punchline to the joke") # Or any other chat model that supports tools. - # Please reference to to the documentation of structured_output + # Please reference to the documentation of structured_output # to see an up to date list of which models support # with_structured_output. - model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0) - structured_llm = model.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats. + model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0) + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats. 
Make sure to call the Joke function.") """ ), @@ -155,15 +148,14 @@ def create_extraction_chain_pydantic( prompt: BasePromptTemplate | None = None, verbose: bool = False, # noqa: FBT001,FBT002 ) -> Chain: - """Creates a chain that extracts information from a passage using pydantic schema. + """Creates a chain that extracts information from a passage using Pydantic schema. Args: - pydantic_schema: The pydantic schema of the entities to extract. + pydantic_schema: The Pydantic schema of the entities to extract. llm: The language model to use. prompt: The prompt to use for extraction. verbose: Whether to run in verbose mode. In verbose mode, some intermediate - logs will be printed to the console. Defaults to the global `verbose` value, - accessible via `langchain.globals.get_verbose()` + logs will be printed to the console. Returns: Chain that can be used to extract information from a passage. diff --git a/libs/langchain/langchain_classic/chains/openai_functions/openapi.py b/libs/langchain/langchain_classic/chains/openai_functions/openapi.py index 2cfbfc31522..c29c111e388 100644 --- a/libs/langchain/langchain_classic/chains/openai_functions/openapi.py +++ b/libs/langchain/langchain_classic/chains/openai_functions/openapi.py @@ -330,7 +330,7 @@ def get_openapi_chain( prompt = ChatPromptTemplate.from_template( "Use the provided APIs to respond to this user query:\\n\\n{query}" ) - llm = ChatOpenAI(model="gpt-4o-mini", temperature=0).bind_tools(tools) + model = ChatOpenAI(model="gpt-4o-mini", temperature=0).bind_tools(tools) def _execute_tool(message) -> Any: if tool_calls := message.tool_calls: @@ -341,7 +341,7 @@ def get_openapi_chain( else: return message.content - chain = prompt | llm | _execute_tool + chain = prompt | model | _execute_tool ``` ```python @@ -394,7 +394,7 @@ def get_openapi_chain( msg = ( "Must provide an LLM for this chain.For example,\n" "from langchain_openai import ChatOpenAI\n" - "llm = ChatOpenAI()\n" + "model = ChatOpenAI()\n" ) raise ValueError(msg) prompt = prompt or ChatPromptTemplate.from_template( diff --git a/libs/langchain/langchain_classic/chains/openai_functions/qa_with_structure.py b/libs/langchain/langchain_classic/chains/openai_functions/qa_with_structure.py index 8e7bede26c6..b36a9c49a22 100644 --- a/libs/langchain/langchain_classic/chains/openai_functions/qa_with_structure.py +++ b/libs/langchain/langchain_classic/chains/openai_functions/qa_with_structure.py @@ -51,8 +51,7 @@ def create_qa_with_structure_chain( Args: llm: Language model to use for the chain. schema: Pydantic schema to use for the output. - output_parser: Output parser to use. Should be one of `pydantic` or `base`. - Default to `base`. + output_parser: Output parser to use. Should be one of `'pydantic'` or `'base'`. prompt: Optional prompt to use for the chain. verbose: Whether to run the chain in verbose mode. diff --git a/libs/langchain/langchain_classic/chains/openai_functions/tagging.py b/libs/langchain/langchain_classic/chains/openai_functions/tagging.py index 81021bc842d..6c30b21cd0c 100644 --- a/libs/langchain/langchain_classic/chains/openai_functions/tagging.py +++ b/libs/langchain/langchain_classic/chains/openai_functions/tagging.py @@ -41,7 +41,7 @@ Passage: "See API reference for this function for replacement: <" "https://api.python.langchain.com/en/latest/chains/langchain.chains.openai_functions.tagging.create_tagging_chain.html" "> You can read more about `with_structured_output` here: " - ". " + ". 
" "If you notice other issues, please provide " "feedback here: " "" @@ -73,18 +73,18 @@ def create_tagging_chain( punchline: Annotated[str, ..., "The punchline of the joke"] # Or any other chat model that supports tools. - # Please reference to to the documentation of structured_output + # Please reference to the documentation of structured_output # to see an up to date list of which models support # with_structured_output. model = ChatAnthropic(model="claude-3-haiku-20240307", temperature=0) - structured_llm = model.with_structured_output(Joke) - structured_llm.invoke( + structured_model = model.with_structured_output(Joke) + structured_model.invoke( "Why did the cat cross the road? To get to the other " "side... and then lay down in the middle of it!" ) ``` - Read more here: https://python.langchain.com/docs/how_to/structured_output/ + Read more here: https://docs.langchain.com/oss/python/langchain/models#structured-outputs Args: schema: The schema of the entities to extract. @@ -93,7 +93,7 @@ def create_tagging_chain( kwargs: Additional keyword arguments to pass to the chain. Returns: - Chain (LLMChain) that can be used to extract information from a passage. + Chain (`LLMChain`) that can be used to extract information from a passage. """ function = _get_tagging_function(schema) @@ -117,7 +117,7 @@ def create_tagging_chain( "See API reference for this function for replacement: <" "https://api.python.langchain.com/en/latest/chains/langchain.chains.openai_functions.tagging.create_tagging_chain_pydantic.html" "> You can read more about `with_structured_output` here: " - ". " + ". " "If you notice other issues, please provide " "feedback here: " "" @@ -130,10 +130,10 @@ def create_tagging_chain_pydantic( prompt: ChatPromptTemplate | None = None, **kwargs: Any, ) -> Chain: - """Create tagging chain from pydantic schema. + """Create tagging chain from Pydantic schema. Create a chain that extracts information from a passage - based on a pydantic schema. + based on a Pydantic schema. This function is deprecated. Please use `with_structured_output` instead. See example usage below: @@ -149,27 +149,27 @@ def create_tagging_chain_pydantic( # Or any other chat model that supports tools. - # Please reference to to the documentation of structured_output + # Please reference to the documentation of structured_output # to see an up to date list of which models support # with_structured_output. - model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0) - structured_llm = model.with_structured_output(Joke) - structured_llm.invoke( + model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0) + structured_model = model.with_structured_output(Joke) + structured_model.invoke( "Why did the cat cross the road? To get to the other " "side... and then lay down in the middle of it!" ) ``` - Read more here: https://python.langchain.com/docs/how_to/structured_output/ + Read more here: https://docs.langchain.com/oss/python/langchain/models#structured-outputs Args: - pydantic_schema: The pydantic schema of the entities to extract. + pydantic_schema: The Pydantic schema of the entities to extract. llm: The language model to use. prompt: The prompt template to use for the chain. kwargs: Additional keyword arguments to pass to the chain. Returns: - Chain (LLMChain) that can be used to extract information from a passage. + Chain (`LLMChain`) that can be used to extract information from a passage. 
""" if hasattr(pydantic_schema, "model_json_schema"): diff --git a/libs/langchain/langchain_classic/chains/openai_tools/extraction.py b/libs/langchain/langchain_classic/chains/openai_tools/extraction.py index 36c5729cdbc..31460f401d5 100644 --- a/libs/langchain/langchain_classic/chains/openai_tools/extraction.py +++ b/libs/langchain/langchain_classic/chains/openai_tools/extraction.py @@ -3,7 +3,9 @@ from langchain_core.language_models import BaseLanguageModel from langchain_core.output_parsers.openai_tools import PydanticToolsParser from langchain_core.prompts import ChatPromptTemplate from langchain_core.runnables import Runnable -from langchain_core.utils.function_calling import convert_pydantic_to_openai_function +from langchain_core.utils.function_calling import ( + convert_to_openai_function as convert_pydantic_to_openai_function, +) from pydantic import BaseModel _EXTRACTION_TEMPLATE = """Extract and save the relevant entities mentioned \ @@ -18,7 +20,7 @@ If a property is not present and is not required in the function parameters, do "LangChain has introduced a method called `with_structured_output` that" "is available on ChatModels capable of tool calling." "You can read more about the method here: " - ". " + ". " "Please follow our extraction use case documentation for more guidelines" "on how to do information extraction with LLMs." ". " @@ -38,12 +40,12 @@ If a property is not present and is not required in the function parameters, do punchline: str = Field(description="The punchline to the joke") # Or any other chat model that supports tools. - # Please reference to to the documentation of structured_output + # Please reference to the documentation of structured_output # to see an up to date list of which models support # with_structured_output. - model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0) - structured_llm = model.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats. + model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0) + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats. Make sure to call the Joke function.") """ ), diff --git a/libs/langchain/langchain_classic/chains/prompt_selector.py b/libs/langchain/langchain_classic/chains/prompt_selector.py index bb8665b9fcf..431b8df33fd 100644 --- a/libs/langchain/langchain_classic/chains/prompt_selector.py +++ b/libs/langchain/langchain_classic/chains/prompt_selector.py @@ -48,7 +48,7 @@ def is_llm(llm: BaseLanguageModel) -> bool: llm: Language model to check. Returns: - True if the language model is a BaseLLM model, False otherwise. + `True` if the language model is a BaseLLM model, `False` otherwise. """ return isinstance(llm, BaseLLM) @@ -60,6 +60,6 @@ def is_chat_model(llm: BaseLanguageModel) -> bool: llm: Language model to check. Returns: - True if the language model is a BaseChatModel model, False otherwise. + `True` if the language model is a BaseChatModel model, `False` otherwise. 
""" return isinstance(llm, BaseChatModel) diff --git a/libs/langchain/langchain_classic/chains/qa_generation/base.py b/libs/langchain/langchain_classic/chains/qa_generation/base.py index df74669d045..fc2ca8824b8 100644 --- a/libs/langchain/langchain_classic/chains/qa_generation/base.py +++ b/libs/langchain/langchain_classic/chains/qa_generation/base.py @@ -34,7 +34,7 @@ class QAGenerationChain(Chain): - Supports async and streaming; - Surfaces prompt and text splitter for easier customization; - Use of JsonOutputParser supports JSONPatch operations in streaming mode, - as well as robustness to markdown. + as well as robustness to markdown. ```python from langchain_classic.chains.qa_generation.prompt import ( @@ -52,14 +52,14 @@ class QAGenerationChain(Chain): from langchain_openai import ChatOpenAI from langchain_text_splitters import RecursiveCharacterTextSplitter - llm = ChatOpenAI() + model = ChatOpenAI() text_splitter = RecursiveCharacterTextSplitter(chunk_overlap=500) split_text = RunnableLambda(lambda x: text_splitter.create_documents([x])) chain = RunnableParallel( text=RunnablePassthrough(), questions=( - split_text | RunnableEach(bound=prompt | llm | JsonOutputParser()) + split_text | RunnableEach(bound=prompt | model | JsonOutputParser()) ), ) ``` diff --git a/libs/langchain/langchain_classic/chains/qa_with_sources/base.py b/libs/langchain/langchain_classic/chains/qa_with_sources/base.py index 30966c177c2..3edd695157d 100644 --- a/libs/langchain/langchain_classic/chains/qa_with_sources/base.py +++ b/libs/langchain/langchain_classic/chains/qa_with_sources/base.py @@ -48,10 +48,10 @@ class BaseQAWithSourcesChain(Chain, ABC): combine_documents_chain: BaseCombineDocumentsChain """Chain to use to combine documents.""" - question_key: str = "question" #: :meta private: - input_docs_key: str = "docs" #: :meta private: - answer_key: str = "answer" #: :meta private: - sources_answer_key: str = "sources" #: :meta private: + question_key: str = "question" + input_docs_key: str = "docs" + answer_key: str = "answer" + sources_answer_key: str = "sources" return_source_documents: bool = False """Return the source documents.""" @@ -109,18 +109,12 @@ class BaseQAWithSourcesChain(Chain, ABC): @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.question_key] @property def output_keys(self) -> list[str]: - """Return output key. - - :meta private: - """ + """Return output key.""" _output_keys = [self.answer_key, self.sources_answer_key] if self.return_source_documents: _output_keys = [*_output_keys, "source_documents"] @@ -233,14 +227,11 @@ class BaseQAWithSourcesChain(Chain, ABC): class QAWithSourcesChain(BaseQAWithSourcesChain): """Question answering with sources over documents.""" - input_docs_key: str = "docs" #: :meta private: + input_docs_key: str = "docs" @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.input_docs_key, self.question_key] @override diff --git a/libs/langchain/langchain_classic/chains/query_constructor/base.py b/libs/langchain/langchain_classic/chains/query_constructor/base.py index 902ea26b8b3..ed42df6cb29 100644 --- a/libs/langchain/langchain_classic/chains/query_constructor/base.py +++ b/libs/langchain/langchain_classic/chains/query_constructor/base.py @@ -218,7 +218,7 @@ def get_query_constructor_prompt( examples: Optional list of examples to use for the chain. allowed_comparators: Sequence of allowed comparators. 
allowed_operators: Sequence of allowed operators. - enable_limit: Whether to enable the limit operator. Defaults to `False`. + enable_limit: Whether to enable the limit operator. schema_prompt: Prompt for describing query schema. Should have string input variables allowed_comparators and allowed_operators. kwargs: Additional named params to pass to FewShotPromptTemplate init. @@ -289,9 +289,10 @@ def load_query_constructor_chain( attribute_info: Sequence of attributes in the document. examples: Optional list of examples to use for the chain. allowed_comparators: Sequence of allowed comparators. Defaults to all - Comparators. - allowed_operators: Sequence of allowed operators. Defaults to all Operators. - enable_limit: Whether to enable the limit operator. Defaults to `False`. + `Comparator` objects. + allowed_operators: Sequence of allowed operators. Defaults to all `Operator` + objects. + enable_limit: Whether to enable the limit operator. schema_prompt: Prompt for describing query schema. Should have string input variables allowed_comparators and allowed_operators. **kwargs: Arbitrary named params to pass to LLMChain. @@ -344,9 +345,10 @@ def load_query_constructor_runnable( attribute_info: Sequence of attributes in the document. examples: Optional list of examples to use for the chain. allowed_comparators: Sequence of allowed comparators. Defaults to all - Comparators. - allowed_operators: Sequence of allowed operators. Defaults to all Operators. - enable_limit: Whether to enable the limit operator. Defaults to `False`. + `Comparator` objects. + allowed_operators: Sequence of allowed operators. Defaults to all `Operator` + objects. + enable_limit: Whether to enable the limit operator. schema_prompt: Prompt for describing query schema. Should have string input variables allowed_comparators and allowed_operators. fix_invalid: Whether to fix invalid filter directives by ignoring invalid diff --git a/libs/langchain/langchain_classic/chains/query_constructor/parser.py b/libs/langchain/langchain_classic/chains/query_constructor/parser.py index d7e4de680e6..2bc4d3a05c1 100644 --- a/libs/langchain/langchain_classic/chains/query_constructor/parser.py +++ b/libs/langchain/langchain_classic/chains/query_constructor/parser.py @@ -112,7 +112,7 @@ class QueryTransformer(Transformer): args: The arguments passed to the function. Returns: - FilterDirective: The filter directive. + The filter directive. Raises: ValueError: If the function is a comparator and the first arg is not in the diff --git a/libs/langchain/langchain_classic/chains/retrieval.py b/libs/langchain/langchain_classic/chains/retrieval.py index 8eb4ff47768..5d635265cc9 100644 --- a/libs/langchain/langchain_classic/chains/retrieval.py +++ b/libs/langchain/langchain_classic/chains/retrieval.py @@ -36,9 +36,9 @@ def create_retrieval_chain( Example: ```python - # pip install -U langchain langchain-community + # pip install -U langchain langchain-openai - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_classic.chains.combine_documents import ( create_stuff_documents_chain, ) @@ -46,9 +46,11 @@ def create_retrieval_chain( from langchain_classic import hub retrieval_qa_chat_prompt = hub.pull("langchain-ai/retrieval-qa-chat") - llm = ChatOpenAI() + model = ChatOpenAI() retriever = ... 
- combine_docs_chain = create_stuff_documents_chain(llm, retrieval_qa_chat_prompt) + combine_docs_chain = create_stuff_documents_chain( + model, retrieval_qa_chat_prompt + ) retrieval_chain = create_retrieval_chain(retriever, combine_docs_chain) retrieval_chain.invoke({"input": "..."}) diff --git a/libs/langchain/langchain_classic/chains/retrieval_qa/base.py b/libs/langchain/langchain_classic/chains/retrieval_qa/base.py index 39fdf46ff60..ec62a77eaa2 100644 --- a/libs/langchain/langchain_classic/chains/retrieval_qa/base.py +++ b/libs/langchain/langchain_classic/chains/retrieval_qa/base.py @@ -42,8 +42,8 @@ class BaseRetrievalQA(Chain): combine_documents_chain: BaseCombineDocumentsChain """Chain to use to combine the documents.""" - input_key: str = "query" #: :meta private: - output_key: str = "result" #: :meta private: + input_key: str = "query" + output_key: str = "result" return_source_documents: bool = False """Return the source documents or not.""" @@ -55,18 +55,12 @@ class BaseRetrievalQA(Chain): @property def input_keys(self) -> list[str]: - """Input keys. - - :meta private: - """ + """Input keys.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Output keys. - - :meta private: - """ + """Output keys.""" _output_keys = [self.output_key] if self.return_source_documents: _output_keys = [*_output_keys, "source_documents"] @@ -237,7 +231,7 @@ class RetrievalQA(BaseRetrievalQA): retriever = ... # Your retriever - llm = ChatOpenAI() + model = ChatOpenAI() system_prompt = ( "Use the given context to answer the question. " @@ -251,7 +245,7 @@ class RetrievalQA(BaseRetrievalQA): ("human", "{input}"), ] ) - question_answer_chain = create_stuff_documents_chain(llm, prompt) + question_answer_chain = create_stuff_documents_chain(model, prompt) chain = create_retrieval_chain(retriever, question_answer_chain) chain.invoke({"input": query}) @@ -259,7 +253,7 @@ class RetrievalQA(BaseRetrievalQA): Example: ```python - from langchain_community.llms import OpenAI + from langchain_openai import OpenAI from langchain_classic.chains import RetrievalQA from langchain_community.vectorstores import FAISS from langchain_core.vectorstores import VectorStoreRetriever diff --git a/libs/langchain/langchain_classic/chains/router/base.py b/libs/langchain/langchain_classic/chains/router/base.py index ea078cb037b..0a0ca8cc024 100644 --- a/libs/langchain/langchain_classic/chains/router/base.py +++ b/libs/langchain/langchain_classic/chains/router/base.py @@ -73,8 +73,7 @@ class MultiRouteChain(Chain): default_chain: Chain """Default chain to use when none of the destination chains are suitable.""" silent_errors: bool = False - """If `True`, use default_chain when an invalid destination name is provided. - Defaults to `False`.""" + """If `True`, use default_chain when an invalid destination name is provided.""" model_config = ConfigDict( arbitrary_types_allowed=True, @@ -83,18 +82,12 @@ class MultiRouteChain(Chain): @property def input_keys(self) -> list[str]: - """Will be whatever keys the router chain prompt expects. - - :meta private: - """ + """Will be whatever keys the router chain prompt expects.""" return self.router_chain.input_keys @property def output_keys(self) -> list[str]: - """Will always return text key. 
- - :meta private: - """ + """Will always return text key.""" return [] def _call( diff --git a/libs/langchain/langchain_classic/chains/router/embedding_router.py b/libs/langchain/langchain_classic/chains/router/embedding_router.py index a519fabc2a3..60d986c1468 100644 --- a/libs/langchain/langchain_classic/chains/router/embedding_router.py +++ b/libs/langchain/langchain_classic/chains/router/embedding_router.py @@ -29,10 +29,7 @@ class EmbeddingRouterChain(RouterChain): @property def input_keys(self) -> list[str]: - """Will be whatever keys the LLM chain prompt expects. - - :meta private: - """ + """Will be whatever keys the LLM chain prompt expects.""" return self.routing_keys @override diff --git a/libs/langchain/langchain_classic/chains/router/llm_router.py b/libs/langchain/langchain_classic/chains/router/llm_router.py index 6fa390a2f12..8d93789a1b6 100644 --- a/libs/langchain/langchain_classic/chains/router/llm_router.py +++ b/libs/langchain/langchain_classic/chains/router/llm_router.py @@ -48,7 +48,7 @@ class LLMRouterChain(RouterChain): from langchain_core.runnables import RunnableLambda, RunnablePassthrough from langchain_openai import ChatOpenAI - llm = ChatOpenAI(model="gpt-4o-mini") + model = ChatOpenAI(model="gpt-4o-mini") prompt_1 = ChatPromptTemplate.from_messages( [ @@ -63,8 +63,8 @@ class LLMRouterChain(RouterChain): ] ) - chain_1 = prompt_1 | llm | StrOutputParser() - chain_2 = prompt_2 | llm | StrOutputParser() + chain_1 = prompt_1 | model | StrOutputParser() + chain_2 = prompt_2 | model | StrOutputParser() route_system = "Route the user's query to either the animal " "or vegetable expert." @@ -83,7 +83,7 @@ class LLMRouterChain(RouterChain): route_chain = ( route_prompt - | llm.with_structured_output(RouteQuery) + | model.with_structured_output(RouteQuery) | itemgetter("destination") ) @@ -118,10 +118,7 @@ class LLMRouterChain(RouterChain): @property def input_keys(self) -> list[str]: - """Will be whatever keys the LLM chain prompt expects. - - :meta private: - """ + """Will be whatever keys the LLM chain prompt expects.""" return self.llm_chain.input_keys def _validate_outputs(self, outputs: dict[str, Any]) -> None: diff --git a/libs/langchain/langchain_classic/chains/router/multi_prompt.py b/libs/langchain/langchain_classic/chains/router/multi_prompt.py index 2c13ec476d4..52c3eac6a1e 100644 --- a/libs/langchain/langchain_classic/chains/router/multi_prompt.py +++ b/libs/langchain/langchain_classic/chains/router/multi_prompt.py @@ -49,7 +49,7 @@ class MultiPromptChain(MultiRouteChain): from langgraph.graph import END, START, StateGraph from typing_extensions import TypedDict - llm = ChatOpenAI(model="gpt-4o-mini") + model = ChatOpenAI(model="gpt-4o-mini") # Define the prompts we will route to prompt_1 = ChatPromptTemplate.from_messages( @@ -68,8 +68,8 @@ class MultiPromptChain(MultiRouteChain): # Construct the chains we will route to. These format the input query # into the respective prompt, run it through a chat model, and cast # the result to a string. - chain_1 = prompt_1 | llm | StrOutputParser() - chain_2 = prompt_2 | llm | StrOutputParser() + chain_1 = prompt_1 | model | StrOutputParser() + chain_2 = prompt_2 | model | StrOutputParser() # Next: define the chain that selects which branch to route to. 
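The docstring example above builds `chain_1` and `chain_2` and then defines a routing step; a minimal sketch of dispatching to the chosen branch in plain Python is shown below. It assumes the `chain_1`, `chain_2`, and `route_chain` objects from the surrounding example, that the prompts take a `query` input, and that `route_chain` returns a `RouteQuery`-shaped dict:

```python
# Sketch only: route the query, then invoke the selected destination chain.
def route_query(query: str) -> str:
    decision = route_chain.invoke({"query": query})  # e.g. {"destination": "animal"}
    branch = chain_1 if decision["destination"] == "animal" else chain_2
    return branch.invoke({"query": query})


route_query("What color are crows?")
```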
@@ -92,7 +92,7 @@ class MultiPromptChain(MultiRouteChain): destination: Literal["animal", "vegetable"] - route_chain = route_prompt | llm.with_structured_output(RouteQuery) + route_chain = route_prompt | model.with_structured_output(RouteQuery) # For LangGraph, we will define the state of the graph to hold the query, diff --git a/libs/langchain/langchain_classic/chains/router/multi_retrieval_qa.py b/libs/langchain/langchain_classic/chains/router/multi_retrieval_qa.py index bf710cd8040..7f9d743f0f8 100644 --- a/libs/langchain/langchain_classic/chains/router/multi_retrieval_qa.py +++ b/libs/langchain/langchain_classic/chains/router/multi_retrieval_qa.py @@ -117,7 +117,7 @@ class MultiRetrievalQAChain(MultiRouteChain): "default LLMs on behalf of users." "You can provide a conversation LLM like so:\n" "from langchain_openai import ChatOpenAI\n" - "llm = ChatOpenAI()" + "model = ChatOpenAI()" ) raise NotImplementedError(msg) _default_chain = ConversationChain( diff --git a/libs/langchain/langchain_classic/chains/sequential.py b/libs/langchain/langchain_classic/chains/sequential.py index 78b576fc6a1..3d333b52b53 100644 --- a/libs/langchain/langchain_classic/chains/sequential.py +++ b/libs/langchain/langchain_classic/chains/sequential.py @@ -18,7 +18,7 @@ class SequentialChain(Chain): chains: list[Chain] input_variables: list[str] - output_variables: list[str] #: :meta private: + output_variables: list[str] return_all: bool = False model_config = ConfigDict( @@ -28,18 +28,12 @@ class SequentialChain(Chain): @property def input_keys(self) -> list[str]: - """Return expected input keys to the chain. - - :meta private: - """ + """Return expected input keys to the chain.""" return self.input_variables @property def output_keys(self) -> list[str]: - """Return output key. - - :meta private: - """ + """Return output key.""" return self.output_variables @model_validator(mode="before") @@ -131,8 +125,8 @@ class SimpleSequentialChain(Chain): chains: list[Chain] strip_outputs: bool = False - input_key: str = "input" #: :meta private: - output_key: str = "output" #: :meta private: + input_key: str = "input" + output_key: str = "output" model_config = ConfigDict( arbitrary_types_allowed=True, @@ -141,18 +135,12 @@ class SimpleSequentialChain(Chain): @property def input_keys(self) -> list[str]: - """Expect input key. - - :meta private: - """ + """Expect input key.""" return [self.input_key] @property def output_keys(self) -> list[str]: - """Return output key. - - :meta private: - """ + """Return output key.""" return [self.output_key] @model_validator(mode="after") diff --git a/libs/langchain/langchain_classic/chains/sql_database/query.py b/libs/langchain/langchain_classic/chains/sql_database/query.py index 9b10b704b62..05667e8dba1 100644 --- a/libs/langchain/langchain_classic/chains/sql_database/query.py +++ b/libs/langchain/langchain_classic/chains/sql_database/query.py @@ -53,16 +53,15 @@ def create_sql_query_chain( Control access to who can submit requests to this chain. - See https://python.langchain.com/docs/security for more information. + See https://docs.langchain.com/oss/python/security-policy for more information. Args: llm: The language model to use. db: The SQLDatabase to generate the query for. prompt: The prompt to use. If none is provided, will choose one - based on dialect. Defaults to `None`. See Prompt section below for more. - k: The number of results per select statement to return. Defaults to 5. + based on dialect. See Prompt section below for more. 
+ k: The number of results per select statement to return. get_col_comments: Whether to retrieve column comments along with table info. - Defaults to `False`. Returns: A chain that takes in a question and generates a SQL query that answers @@ -76,8 +75,8 @@ def create_sql_query_chain( from langchain_community.utilities import SQLDatabase db = SQLDatabase.from_uri("sqlite:///Chinook.db") - llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0) - chain = create_sql_query_chain(llm, db) + model = ChatOpenAI(model="gpt-3.5-turbo", temperature=0) + chain = create_sql_query_chain(model, db) response = chain.invoke({"question": "How many employees are there"}) ``` diff --git a/libs/langchain/langchain_classic/chains/structured_output/base.py b/libs/langchain/langchain_classic/chains/structured_output/base.py index 9d99cb33f9c..93bd6156d55 100644 --- a/libs/langchain/langchain_classic/chains/structured_output/base.py +++ b/libs/langchain/langchain_classic/chains/structured_output/base.py @@ -34,7 +34,7 @@ from pydantic import BaseModel "LangChain has introduced a method called `with_structured_output` that " "is available on ChatModels capable of tool calling. " "You can read more about the method here: " - ". " + ". " "Please follow our extraction use case documentation for more guidelines " "on how to do information extraction with LLMs. " ". " @@ -53,12 +53,12 @@ from pydantic import BaseModel punchline: str = Field(description="The punchline to the joke") # Or any other chat model that supports tools. - # Please reference to to the documentation of structured_output + # Please reference to the documentation of structured_output # to see an up to date list of which models support # with_structured_output. - model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0) - structured_llm = model.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats. + model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0) + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats. Make sure to call the Joke function.") """ ), @@ -127,9 +127,9 @@ def create_openai_fn_runnable( fav_food: str | None = Field(None, description="The dog's favorite food") - llm = ChatOpenAI(model="gpt-4", temperature=0) - structured_llm = create_openai_fn_runnable([RecordPerson, RecordDog], llm) - structured_llm.invoke("Harry was a chubby brown beagle who loved chicken) + model = ChatOpenAI(model="gpt-4", temperature=0) + structured_model = create_openai_fn_runnable([RecordPerson, RecordDog], model) + structured_model.invoke("Harry was a chubby brown beagle who loved chicken) # -> RecordDog(name="Harry", color="brown", fav_food="chicken") ``` @@ -153,7 +153,7 @@ def create_openai_fn_runnable( "LangChain has introduced a method called `with_structured_output` that " "is available on ChatModels capable of tool calling. " "You can read more about the method here: " - "." + "." "Please follow our extraction use case documentation for more guidelines " "on how to do information extraction with LLMs. " ". " @@ -172,12 +172,12 @@ def create_openai_fn_runnable( punchline: str = Field(description="The punchline to the joke") # Or any other chat model that supports tools. - # Please reference to to the documentation of structured_output + # Please reference to the documentation of structured_output # to see an up to date list of which models support # with_structured_output. 
- model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0) - structured_llm = model.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats. + model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0) + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats. Make sure to call the Joke function.") """ ), @@ -250,21 +250,21 @@ def create_structured_output_runnable( color: str = Field(..., description="The dog's color") fav_food: str | None = Field(None, description="The dog's favorite food") - llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + model = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) prompt = ChatPromptTemplate.from_messages( [ ("system", "You are an extraction algorithm. Please extract every possible instance"), ('human', '{input}') ] ) - structured_llm = create_structured_output_runnable( + structured_model = create_structured_output_runnable( RecordDog, - llm, + model, mode="openai-tools", enforce_function_usage=True, return_single=True ) - structured_llm.invoke({"input": "Harry was a chubby brown beagle who loved chicken"}) + structured_model.invoke({"input": "Harry was a chubby brown beagle who loved chicken"}) # -> RecordDog(name="Harry", color="brown", fav_food="chicken") ``` @@ -303,15 +303,15 @@ def create_structured_output_runnable( } - llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) - structured_llm = create_structured_output_runnable( + model = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + structured_model = create_structured_output_runnable( dog_schema, - llm, + model, mode="openai-tools", enforce_function_usage=True, return_single=True ) - structured_llm.invoke("Harry was a chubby brown beagle who loved chicken") + structured_model.invoke("Harry was a chubby brown beagle who loved chicken") # -> {'name': 'Harry', 'color': 'brown', 'fav_food': 'chicken'} ``` @@ -330,9 +330,9 @@ def create_structured_output_runnable( color: str = Field(..., description="The dog's color") fav_food: str | None = Field(None, description="The dog's favorite food") - llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) - structured_llm = create_structured_output_runnable(Dog, llm, mode="openai-functions") - structured_llm.invoke("Harry was a chubby brown beagle who loved chicken") + model = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + structured_model = create_structured_output_runnable(Dog, model, mode="openai-functions") + structured_model.invoke("Harry was a chubby brown beagle who loved chicken") # -> Dog(name="Harry", color="brown", fav_food="chicken") ``` @@ -352,13 +352,13 @@ def create_structured_output_runnable( color: str = Field(..., description="The dog's color") fav_food: str | None = Field(None, description="The dog's favorite food") - llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) - structured_llm = create_structured_output_runnable(Dog, llm, mode="openai-functions") + model = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + structured_model = create_structured_output_runnable(Dog, model, mode="openai-functions") system = '''Extract information about any dogs mentioned in the user input.''' prompt = ChatPromptTemplate.from_messages( [("system", system), ("human", "{input}"),] ) - chain = prompt | structured_llm + chain = prompt | structured_model chain.invoke({"input": "Harry was a chubby brown beagle who loved chicken"}) # -> Dog(name="Harry", color="brown", fav_food="chicken") ``` @@ -379,8 
+379,8 @@ def create_structured_output_runnable( color: str = Field(..., description="The dog's color") fav_food: str | None = Field(None, description="The dog's favorite food") - llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) - structured_llm = create_structured_output_runnable(Dog, llm, mode="openai-json") + model = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + structured_model = create_structured_output_runnable(Dog, model, mode="openai-json") system = '''You are a world class assistant for extracting information in structured JSON formats. \ Extract a valid JSON blob from the user input that matches the following JSON Schema: @@ -389,7 +389,7 @@ def create_structured_output_runnable( prompt = ChatPromptTemplate.from_messages( [("system", system), ("human", "{input}"),] ) - chain = prompt | structured_llm + chain = prompt | structured_model chain.invoke({"input": "Harry was a chubby brown beagle who loved chicken"}) ``` diff --git a/libs/langchain/langchain_classic/chains/summarize/chain.py b/libs/langchain/langchain_classic/chains/summarize/chain.py index 8b712d06cb9..08f82329bee 100644 --- a/libs/langchain/langchain_classic/chains/summarize/chain.py +++ b/libs/langchain/langchain_classic/chains/summarize/chain.py @@ -47,10 +47,10 @@ def _load_stuff_chain( Args: llm: Language Model to use in the chain. prompt: Prompt template that controls how the documents are formatted and - passed into the LLM. Defaults to `stuff_prompt.PROMPT`. + passed into the LLM. document_variable_name: Variable name in the prompt template where the - document text will be inserted. Defaults to "text". - verbose: Whether to log progress and intermediate steps. Defaults to `None`. + document text will be inserted. + verbose: Whether to log progress and intermediate steps. **kwargs: Additional keyword arguments passed to the StuffDocumentsChain. Returns: @@ -103,26 +103,23 @@ def _load_map_reduce_chain( Args: llm: Language Model to use for map and reduce steps. - map_prompt: Prompt used to summarize each documnet in the map step. - Defaults to `map_reduce_prompt.PROMPT`. + map_prompt: Prompt used to summarize each document in the map step. combine_prompt: Prompt used to combine summaries in the reduce step. - Defaults to `map_reduce_prompt.PROMPT`. combine_document_variable_name: Variable name in the `combine_prompt` where - the mapped summaries are inserted. Defaults to "text". + the mapped summaries are inserted. map_reduce_document_variable_name: Variable name in the `map_prompt` - where document text is inserted. Defaults to "text". + where document text is inserted. collapse_prompt: Optional prompt used to collapse intermediate summaries - if they exceed the token limit (`token_max`). Defaults to `None`. - reduce_llm: Optional separate LLM for the reduce step. Defaults to `None`, + if they exceed the token limit (`token_max`). + reduce_llm: Optional separate LLM for the reduce step. which uses the same model as the map step. - collapse_llm: Optional separate LLM for the collapse step. Defaults to `None`, + collapse_llm: Optional separate LLM for the collapse step. which uses the same model as the map step. - verbose: Whether to log progess and intermediate steps. Defaults to `None`. + verbose: Whether to log progress and intermediate steps. token_max: Token threshold that triggers the collapse step during reduction. - Defaults to 3000. - callbacks: Optional callbacks for logging and tracing. Defaults to `None`. + callbacks: Optional callbacks for logging and tracing. 
collapse_max_retries: Maximum retries for the collapse step if it fails. - Defaults to `None`. + **kwargs: Additional keyword arguments passed to the MapReduceDocumentsChain. Returns: diff --git a/libs/langchain/langchain_classic/chains/transform.py b/libs/langchain/langchain_classic/chains/transform.py index 709ec348dde..a56273f1e50 100644 --- a/libs/langchain/langchain_classic/chains/transform.py +++ b/libs/langchain/langchain_classic/chains/transform.py @@ -43,26 +43,17 @@ class TransformChain(Chain): @staticmethod @functools.lru_cache def _log_once(msg: str) -> None: - """Log a message once. - - :meta private: - """ + """Log a message once.""" logger.warning(msg) @property def input_keys(self) -> list[str]: - """Expect input keys. - - :meta private: - """ + """Expect input keys.""" return self.input_variables @property def output_keys(self) -> list[str]: - """Return output keys. - - :meta private: - """ + """Return output keys.""" return self.output_variables @override diff --git a/libs/langchain/langchain_classic/chat_models/base.py b/libs/langchain/langchain_classic/chat_models/base.py index 20a888e9166..b22b74ae306 100644 --- a/libs/langchain/langchain_classic/chat_models/base.py +++ b/libs/langchain/langchain_classic/chat_models/base.py @@ -76,173 +76,204 @@ def init_chat_model( config_prefix: str | None = None, **kwargs: Any, ) -> BaseChatModel | _ConfigurableModel: - """Initialize a ChatModel in a single line using the model's name and provider. + """Initialize a chat model from any supported provider using a unified interface. + + **Two main use cases:** + + 1. **Fixed model** – specify the model upfront and get back a ready-to-use chat + model. + 2. **Configurable model** – choose to specify parameters (including model name) at + runtime via `config`. Makes it easy to switch between models/providers without + changing your code !!! note - Must have the integration package corresponding to the model provider installed. - You should look at the [provider integration's API reference](https://docs.langchain.com/oss/python/integrations/providers) - to see what parameters are supported by the model. + Requires the integration package for the chosen model provider to be installed. + + See the `model_provider` parameter below for specific package names + (e.g., `pip install langchain-openai`). + + Refer to the [provider integration's API reference](https://docs.langchain.com/oss/python/integrations/providers) + for supported model parameters to use as `**kwargs`. Args: - model: The name of the model, e.g. `'o3-mini'`, `'claude-3-5-sonnet-latest'`. You can - also specify model and model provider in a single argument using + model: The name or ID of the model, e.g. `'o3-mini'`, `'claude-sonnet-4-5-20250929'`. + + You can also specify model and model provider in a single argument using `'{model_provider}:{model}'` format, e.g. `'openai:o1'`. - model_provider: The model provider if not specified as part of model arg (see - above). Supported model_provider values and the corresponding integration - package are: + model_provider: The model provider if not specified as part of the model arg + (see above). 
- - `openai` -> `langchain-openai` - - `anthropic` -> `langchain-anthropic` - - `azure_openai` -> `langchain-openai` - - `azure_ai` -> `langchain-azure-ai` - - `google_vertexai` -> `langchain-google-vertexai` - - `google_genai` -> `langchain-google-genai` - - `bedrock` -> `langchain-aws` - - `bedrock_converse` -> `langchain-aws` - - `cohere` -> `langchain-cohere` - - `fireworks` -> `langchain-fireworks` - - `together` -> `langchain-together` - - `mistralai` -> `langchain-mistralai` - - `huggingface` -> `langchain-huggingface` - - `groq` -> `langchain-groq` - - `ollama` -> `langchain-ollama` - - `google_anthropic_vertex` -> `langchain-google-vertexai` - - `deepseek` -> `langchain-deepseek` - - `ibm` -> `langchain-ibm` - - `nvidia` -> `langchain-nvidia-ai-endpoints` - - `xai` -> `langchain-xai` - - `perplexity` -> `langchain-perplexity` + Supported `model_provider` values and the corresponding integration package + are: - Will attempt to infer model_provider from model if not specified. The + - `openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `anthropic` -> [`langchain-anthropic`](https://docs.langchain.com/oss/python/integrations/providers/anthropic) + - `azure_openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `azure_ai` -> [`langchain-azure-ai`](https://docs.langchain.com/oss/python/integrations/providers/microsoft) + - `google_vertexai` -> [`langchain-google-vertexai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `google_genai` -> [`langchain-google-genai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `bedrock` -> [`langchain-aws`](https://docs.langchain.com/oss/python/integrations/providers/aws) + - `bedrock_converse` -> [`langchain-aws`](https://docs.langchain.com/oss/python/integrations/providers/aws) + - `cohere` -> [`langchain-cohere`](https://docs.langchain.com/oss/python/integrations/providers/cohere) + - `fireworks` -> [`langchain-fireworks`](https://docs.langchain.com/oss/python/integrations/providers/fireworks) + - `together` -> [`langchain-together`](https://docs.langchain.com/oss/python/integrations/providers/together) + - `mistralai` -> [`langchain-mistralai`](https://docs.langchain.com/oss/python/integrations/providers/mistralai) + - `huggingface` -> [`langchain-huggingface`](https://docs.langchain.com/oss/python/integrations/providers/huggingface) + - `groq` -> [`langchain-groq`](https://docs.langchain.com/oss/python/integrations/providers/groq) + - `ollama` -> [`langchain-ollama`](https://docs.langchain.com/oss/python/integrations/providers/ollama) + - `google_anthropic_vertex` -> [`langchain-google-vertexai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `deepseek` -> [`langchain-deepseek`](https://docs.langchain.com/oss/python/integrations/providers/deepseek) + - `ibm` -> [`langchain-ibm`](https://docs.langchain.com/oss/python/integrations/providers/ibm) + - `nvidia` -> [`langchain-nvidia-ai-endpoints`](https://docs.langchain.com/oss/python/integrations/providers/nvidia) + - `xai` -> [`langchain-xai`](https://docs.langchain.com/oss/python/integrations/providers/xai) + - `perplexity` -> [`langchain-perplexity`](https://docs.langchain.com/oss/python/integrations/providers/perplexity) + + Will attempt to infer `model_provider` from model if not specified. 
The following providers will be inferred based on these model prefixes: - `gpt-...` | `o1...` | `o3...` -> `openai` - - `claude...` -> `anthropic` - - `amazon...` -> `bedrock` - - `gemini...` -> `google_vertexai` - - `command...` -> `cohere` - - `accounts/fireworks...` -> `fireworks` - - `mistral...` -> `mistralai` - - `deepseek...` -> `deepseek` - - `grok...` -> `xai` - - `sonar...` -> `perplexity` - configurable_fields: Which model parameters are configurable: + - `claude...` -> `anthropic` + - `amazon...` -> `bedrock` + - `gemini...` -> `google_vertexai` + - `command...` -> `cohere` + - `accounts/fireworks...` -> `fireworks` + - `mistral...` -> `mistralai` + - `deepseek...` -> `deepseek` + - `grok...` -> `xai` + - `sonar...` -> `perplexity` + configurable_fields: Which model parameters are configurable at runtime: - - None: No configurable fields. - - `'any'`: All fields are configurable. **See Security Note below.** - - Union[List[str], Tuple[str, ...]]: Specified fields are configurable. + - `None`: No configurable fields (i.e., a fixed model). + - `'any'`: All fields are configurable. **See security note below.** + - `list[str] | Tuple[str, ...]`: Specified fields are configurable. - Fields are assumed to have config_prefix stripped if there is a - config_prefix. If model is specified, then defaults to None. If model is - not specified, then defaults to `("model", "model_provider")`. + Fields are assumed to have `config_prefix` stripped if a `config_prefix` is + specified. - ***Security Note***: Setting `configurable_fields="any"` means fields like - `api_key`, `base_url`, etc. can be altered at runtime, potentially redirecting - model requests to a different service/user. Make sure that if you're - accepting untrusted configurations that you enumerate the - `configurable_fields=(...)` explicitly. + If `model` is specified, then defaults to `None`. - config_prefix: If `'config_prefix'` is a non-empty string then model will be - configurable at runtime via the - `config["configurable"]["{config_prefix}_{param}"]` keys. If - `'config_prefix'` is an empty string then model will be configurable via + If `model` is not specified, then defaults to `("model", "model_provider")`. + + !!! warning "Security note" + Setting `configurable_fields="any"` means fields like `api_key`, + `base_url`, etc., can be altered at runtime, potentially redirecting + model requests to a different service/user. + + Make sure that if you're accepting untrusted configurations that you + enumerate the `configurable_fields=(...)` explicitly. + + config_prefix: Optional prefix for configuration keys. + + Useful when you have multiple configurable models in the same application. + + If `'config_prefix'` is a non-empty string then `model` will be configurable + at runtime via the `config["configurable"]["{config_prefix}_{param}"]` keys. + See examples below. + + If `'config_prefix'` is an empty string then model will be configurable via `config["configurable"]["{param}"]`. - temperature: Model temperature. - max_tokens: Max output tokens. - timeout: The maximum time (in seconds) to wait for a response from the model - before canceling the request. - max_retries: The maximum number of attempts the system will make to resend a - request if it fails due to issues like network timeouts or rate limits. - base_url: The URL of the API endpoint where requests are sent. - rate_limiter: A `BaseRateLimiter` to space out requests to avoid exceeding - rate limits. 
- kwargs: Additional model-specific keyword args to pass to - `<>.__init__(model=model_name, **kwargs)`. + **kwargs: Additional model-specific keyword args to pass to the underlying + chat model's `__init__` method. Common parameters include: + + - `temperature`: Model temperature for controlling randomness. + - `max_tokens`: Maximum number of output tokens. + - `timeout`: Maximum time (in seconds) to wait for a response. + - `max_retries`: Maximum number of retry attempts for failed requests. + - `base_url`: Custom API endpoint URL. + - `rate_limiter`: A + [`BaseRateLimiter`][langchain_core.rate_limiters.BaseRateLimiter] + instance to control request rate. + + Refer to the specific model provider's + [integration reference](https://reference.langchain.com/python/integrations/) + for all available parameters. Returns: - A BaseChatModel corresponding to the model_name and model_provider specified if - configurability is inferred to be False. If configurable, a chat model emulator - that initializes the underlying model at runtime once a config is passed in. + A [`BaseChatModel`][langchain_core.language_models.BaseChatModel] corresponding + to the `model_name` and `model_provider` specified if configurability is + inferred to be `False`. If configurable, a chat model emulator that + initializes the underlying model at runtime once a config is passed in. Raises: - ValueError: If model_provider cannot be inferred or isn't supported. + ValueError: If `model_provider` cannot be inferred or isn't supported. ImportError: If the model provider integration package is not installed. - ???+ note "Init non-configurable model" + ???+ example "Initialize a non-configurable model" ```python # pip install langchain langchain-openai langchain-anthropic langchain-google-vertexai + from langchain_classic.chat_models import init_chat_model o3_mini = init_chat_model("openai:o3-mini", temperature=0) - claude_sonnet = init_chat_model( - "anthropic:claude-3-5-sonnet-latest", temperature=0 - ) - gemini_2_flash = init_chat_model( + claude_sonnet = init_chat_model("anthropic:claude-sonnet-4-5-20250929", temperature=0) + gemini_2_5_flash = init_chat_model( "google_vertexai:gemini-2.5-flash", temperature=0 ) o3_mini.invoke("what's your name") claude_sonnet.invoke("what's your name") - gemini_2_flash.invoke("what's your name") + gemini_2_5_flash.invoke("what's your name") ``` - ??? note "Partially configurable model with no default" + ??? example "Partially configurable model with no default" ```python # pip install langchain langchain-openai langchain-anthropic + from langchain_classic.chat_models import init_chat_model - # We don't need to specify configurable=True if a model isn't specified. + # (We don't need to specify configurable=True if a model isn't specified.) configurable_model = init_chat_model(temperature=0) configurable_model.invoke( "what's your name", config={"configurable": {"model": "gpt-4o"}} ) - # GPT-4o response + # Use GPT-4o to generate the response configurable_model.invoke( "what's your name", - config={"configurable": {"model": "claude-3-5-sonnet-latest"}}, + config={"configurable": {"model": "claude-sonnet-4-5-20250929"}}, ) - # claude-3.5 sonnet response ``` - ??? note "Fully configurable model with a default" + ???
example "Fully configurable model with a default" ```python # pip install langchain langchain-openai langchain-anthropic + from langchain_classic.chat_models import init_chat_model configurable_model_with_default = init_chat_model( "openai:gpt-4o", - configurable_fields="any", # this allows us to configure other params like temperature, max_tokens, etc at runtime. + configurable_fields="any", # This allows us to configure other params like temperature, max_tokens, etc at runtime. config_prefix="foo", temperature=0, ) configurable_model_with_default.invoke("what's your name") - # GPT-4o response with temperature 0 + # GPT-4o response with temperature 0 (as set in default) configurable_model_with_default.invoke( "what's your name", config={ "configurable": { - "foo_model": "anthropic:claude-3-5-sonnet-latest", + "foo_model": "anthropic:claude-sonnet-4-5-20250929", "foo_temperature": 0.6, } }, ) - # Claude-3.5 sonnet response with temperature 0.6 + # Override default to use Sonnet 4.5 with temperature 0.6 to generate response ``` - ??? note "Bind tools to a configurable model" + ??? example "Bind tools to a configurable model" - You can call any ChatModel declarative methods on a configurable model in the - same way that you would with a normal model. + You can call any chat model declarative methods on a configurable model in the + same way that you would with a normal model: ```python # pip install langchain langchain-openai langchain-anthropic + from langchain_classic.chat_models import init_chat_model from pydantic import BaseModel, Field @@ -276,33 +307,31 @@ def init_chat_model( configurable_model_with_tools.invoke( "Which city is hotter today and which is bigger: LA or NY?" ) - # GPT-4o response with tool calls + # Use GPT-4o configurable_model_with_tools.invoke( "Which city is hotter today and which is bigger: LA or NY?", - config={"configurable": {"model": "claude-3-5-sonnet-latest"}}, + config={"configurable": {"model": "claude-sonnet-4-5-20250929"}}, ) - # Claude-3.5 sonnet response with tools + # Use Sonnet 4.5 ``` - !!! version-added "Added in version 0.2.7" - - !!! warning "Behavior changed in 0.2.8" + !!! warning "Behavior changed in `langchain` 0.2.8" Support for `configurable_fields` and `config_prefix` added. - !!! warning "Behavior changed in 0.2.12" + !!! warning "Behavior changed in `langchain` 0.2.12" Support for Ollama via langchain-ollama package added - (langchain_ollama.ChatOllama). Previously, + (`langchain_ollama.ChatOllama`). Previously, the now-deprecated langchain-community version of Ollama was imported - (langchain_community.chat_models.ChatOllama). + (`langchain_community.chat_models.ChatOllama`). Support for AWS Bedrock models via the Converse API added - (model_provider="bedrock_converse"). + (`model_provider="bedrock_converse"`). - !!! warning "Behavior changed in 0.3.5" + !!! warning "Behavior changed in `langchain` 0.3.5" Out of beta. - !!! warning "Behavior changed in 0.3.19" + !!! warning "Behavior changed in `langchain` 0.3.19" Support for Deepseek, IBM, Nvidia, and xAI models added. 
""" # noqa: E501 @@ -406,11 +435,14 @@ def _init_chat_model_helper( from langchain_mistralai import ChatMistralAI return ChatMistralAI(model=model, **kwargs) # type: ignore[call-arg,unused-ignore] + if model_provider == "huggingface": _check_pkg("langchain_huggingface") - from langchain_huggingface import ChatHuggingFace + from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline + + llm = HuggingFacePipeline.from_model_id(model_id=model, **kwargs) + return ChatHuggingFace(llm=llm) - return ChatHuggingFace(model_id=model, **kwargs) if model_provider == "groq": _check_pkg("langchain_groq") from langchain_groq import ChatGroq @@ -622,7 +654,7 @@ class _ConfigurableModel(Runnable[LanguageModelInput, Any]): config: RunnableConfig | None = None, **kwargs: Any, ) -> _ConfigurableModel: - """Bind config to a Runnable, returning a new Runnable.""" + """Bind config to a `Runnable`, returning a new `Runnable`.""" config = RunnableConfig(**(config or {}), **cast("RunnableConfig", kwargs)) model_params = self._model_params(config) remaining_config = {k: v for k, v in config.items() if k != "configurable"} diff --git a/libs/langchain/langchain_classic/embeddings/__init__.py b/libs/langchain/langchain_classic/embeddings/__init__.py index c10293c4574..c76d24b0e97 100644 --- a/libs/langchain/langchain_classic/embeddings/__init__.py +++ b/libs/langchain/langchain_classic/embeddings/__init__.py @@ -70,34 +70,12 @@ if TYPE_CHECKING: XinferenceEmbeddings, ) + from langchain_classic.chains.hyde.base import HypotheticalDocumentEmbedder + logger = logging.getLogger(__name__) -# TODO: this is in here to maintain backwards compatibility -class HypotheticalDocumentEmbedder: - def __init__(self, *args: Any, **kwargs: Any): - logger.warning( - "Using a deprecated class. Please use " - "`from langchain_classic.chains import " - "HypotheticalDocumentEmbedder` instead", - ) - from langchain_classic.chains.hyde.base import HypotheticalDocumentEmbedder as H - - return H(*args, **kwargs) # type: ignore[return-value] # noqa: PLE0101 - - @classmethod - def from_llm(cls, *args: Any, **kwargs: Any) -> Any: - logger.warning( - "Using a deprecated class. Please use " - "`from langchain_classic.chains import " - "HypotheticalDocumentEmbedder` instead", - ) - from langchain_classic.chains.hyde.base import HypotheticalDocumentEmbedder as H - - return H.from_llm(*args, **kwargs) - - # Create a way to dynamically look up deprecated imports. # Used to consolidate logic for raising deprecation warnings and # handling optional imports. 
@@ -128,6 +106,7 @@ DEPRECATED_LOOKUP = { "HuggingFaceHubEmbeddings": "langchain_community.embeddings", "HuggingFaceInferenceAPIEmbeddings": "langchain_community.embeddings", "HuggingFaceInstructEmbeddings": "langchain_community.embeddings", + "HypotheticalDocumentEmbedder": "langchain_classic.chains.hyde.base", "InfinityEmbeddings": "langchain_community.embeddings", "JavelinAIGatewayEmbeddings": "langchain_community.embeddings", "JinaEmbeddings": "langchain_community.embeddings", @@ -193,6 +172,7 @@ __all__ = [ "HuggingFaceHubEmbeddings", "HuggingFaceInferenceAPIEmbeddings", "HuggingFaceInstructEmbeddings", + "HypotheticalDocumentEmbedder", "InfinityEmbeddings", "JavelinAIGatewayEmbeddings", "JinaEmbeddings", diff --git a/libs/langchain/langchain_classic/embeddings/base.py b/libs/langchain/langchain_classic/embeddings/base.py index d7f4f8021eb..7fc32abc98e 100644 --- a/libs/langchain/langchain_classic/embeddings/base.py +++ b/libs/langchain/langchain_classic/embeddings/base.py @@ -137,20 +137,34 @@ def init_embeddings( installed. Args: - model: Name of the model to use. Can be either: - - A model string like "openai:text-embedding-3-small" - - Just the model name if provider is specified - provider: Optional explicit provider name. If not specified, - will attempt to parse from the model string. Supported providers - and their required packages: + model: Name of the model to use. - {_get_provider_list()} + Can be either: + + - A model string like `"openai:text-embedding-3-small"` + - Just the model name if the provider is specified separately or can be + inferred. + + See supported providers under the `provider` arg description. + provider: Optional explicit provider name. If not specified, will attempt to + parse from the model string in the `model` arg. + + Supported providers: + + - `openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `azure_openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `bedrock` -> [`langchain-aws`](https://docs.langchain.com/oss/python/integrations/providers/aws) + - `cohere` -> [`langchain-cohere`](https://docs.langchain.com/oss/python/integrations/providers/cohere) + - `google_vertexai` -> [`langchain-google-vertexai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `huggingface` -> [`langchain-huggingface`](https://docs.langchain.com/oss/python/integrations/providers/huggingface) + - `mistralai` -> [`langchain-mistralai`](https://docs.langchain.com/oss/python/integrations/providers/mistralai) + - `ollama` -> [`langchain-ollama`](https://docs.langchain.com/oss/python/integrations/providers/ollama) **kwargs: Additional model-specific parameters passed to the embedding model. These vary by provider, see the provider-specific documentation for details. Returns: - An Embeddings instance that can generate embeddings for text. + An `Embeddings` instance that can generate embeddings for text. Raises: ValueError: If the model provider is not supported or cannot be determined @@ -171,7 +185,7 @@ def init_embeddings( model = init_embeddings("openai:text-embedding-3-small", api_key="sk-...") ``` - !!! version-added "Added in version 0.3.9" + !!!
version-added "Added in `langchain` 0.3.9" """ if not model: diff --git a/libs/langchain/langchain_classic/embeddings/cache.py b/libs/langchain/langchain_classic/embeddings/cache.py index 98f6cf7376e..08a900a45f5 100644 --- a/libs/langchain/langchain_classic/embeddings/cache.py +++ b/libs/langchain/langchain_classic/embeddings/cache.py @@ -122,7 +122,7 @@ class CacheBackedEmbeddings(Embeddings): ```python from langchain_classic.embeddings import CacheBackedEmbeddings from langchain_classic.storage import LocalFileStore - from langchain_community.embeddings import OpenAIEmbeddings + from langchain_openai import OpenAIEmbeddings store = LocalFileStore("./my_cache") diff --git a/libs/langchain/langchain_classic/evaluation/agents/trajectory_eval_chain.py b/libs/langchain/langchain_classic/evaluation/agents/trajectory_eval_chain.py index e4cd9410d1f..f3828257785 100644 --- a/libs/langchain/langchain_classic/evaluation/agents/trajectory_eval_chain.py +++ b/libs/langchain/langchain_classic/evaluation/agents/trajectory_eval_chain.py @@ -104,7 +104,7 @@ class TrajectoryEvalChain(AgentTrajectoryEvaluator, LLMEvalChain): Example: ```python from langchain_classic.agents import AgentType, initialize_agent - from langchain_community.chat_models import ChatOpenAI + from langchain_openai import ChatOpenAI from langchain_classic.evaluation import TrajectoryEvalChain from langchain_classic.tools import tool @@ -113,10 +113,10 @@ class TrajectoryEvalChain(AgentTrajectoryEvaluator, LLMEvalChain): \"\"\"Very helpful answers to geography questions.\"\"\" return f"{country}? IDK - We may never know {question}." - llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0) + model = ChatOpenAI(model="gpt-3.5-turbo", temperature=0) agent = initialize_agent( tools=[geography_answers], - llm=llm, + llm=model, agent=AgentType.OPENAI_FUNCTIONS, return_intermediate_steps=True, ) @@ -125,7 +125,7 @@ class TrajectoryEvalChain(AgentTrajectoryEvaluator, LLMEvalChain): response = agent(question) eval_chain = TrajectoryEvalChain.from_llm( - llm=llm, agent_tools=[geography_answers], return_reasoning=True + llm=model, agent_tools=[geography_answers], return_reasoning=True ) result = eval_chain.evaluate_agent_trajectory( @@ -148,7 +148,7 @@ class TrajectoryEvalChain(AgentTrajectoryEvaluator, LLMEvalChain): default_factory=TrajectoryOutputParser, ) """The output parser used to parse the output.""" - return_reasoning: bool = False # :meta private: + return_reasoning: bool = False """DEPRECATED. Reasoning always returned.""" model_config = ConfigDict( @@ -165,7 +165,7 @@ class TrajectoryEvalChain(AgentTrajectoryEvaluator, LLMEvalChain): """Get the description of the agent tools. Returns: - str: The description of the agent tools. + The description of the agent tools. """ if self.agent_tools is None: return "" @@ -184,10 +184,10 @@ Description: {tool.description}""" """Get the agent trajectory as a formatted string. Args: - steps (Union[str, List[Tuple[AgentAction, str]]]): The agent trajectory. + steps: The agent trajectory. Returns: - str: The formatted agent trajectory. + The formatted agent trajectory. """ if isinstance(steps, str): return steps @@ -240,7 +240,7 @@ The following is the expected answer. Use this to measure correctness: **kwargs: Additional keyword arguments. Returns: - TrajectoryEvalChain: The TrajectoryEvalChain object. + The `TrajectoryEvalChain` object. 
""" if not isinstance(llm, BaseChatModel): msg = "Only chat models supported by the current trajectory eval" @@ -259,7 +259,7 @@ The following is the expected answer. Use this to measure correctness: """Get the input keys for the chain. Returns: - List[str]: The input keys. + The input keys. """ return ["question", "agent_trajectory", "answer", "reference"] @@ -268,7 +268,7 @@ The following is the expected answer. Use this to measure correctness: """Get the output keys for the chain. Returns: - List[str]: The output keys. + The output keys. """ return ["score", "reasoning"] @@ -289,7 +289,7 @@ The following is the expected answer. Use this to measure correctness: run_manager: The callback manager for the chain run. Returns: - Dict[str, Any]: The output values of the chain. + The output values of the chain. """ chain_input = {**inputs} if self.agent_tools: @@ -313,7 +313,7 @@ The following is the expected answer. Use this to measure correctness: run_manager: The callback manager for the chain run. Returns: - Dict[str, Any]: The output values of the chain. + The output values of the chain. """ chain_input = {**inputs} if self.agent_tools: diff --git a/libs/langchain/langchain_classic/evaluation/comparison/__init__.py b/libs/langchain/langchain_classic/evaluation/comparison/__init__.py index 790290a9e21..62148c2677b 100644 --- a/libs/langchain/langchain_classic/evaluation/comparison/__init__.py +++ b/libs/langchain/langchain_classic/evaluation/comparison/__init__.py @@ -6,7 +6,7 @@ preferences, measuring similarity / semantic equivalence between outputs, or any other comparison task. Example: - >>> from langchain_community.chat_models import ChatOpenAI + >>> from langchain_openai import ChatOpenAI >>> from langchain_classic.evaluation.comparison import PairwiseStringEvalChain >>> llm = ChatOpenAI(temperature=0) >>> chain = PairwiseStringEvalChain.from_llm(llm=llm) diff --git a/libs/langchain/langchain_classic/evaluation/comparison/eval_chain.py b/libs/langchain/langchain_classic/evaluation/comparison/eval_chain.py index 51ef0838cb9..03599b59b41 100644 --- a/libs/langchain/langchain_classic/evaluation/comparison/eval_chain.py +++ b/libs/langchain/langchain_classic/evaluation/comparison/eval_chain.py @@ -163,12 +163,12 @@ class PairwiseStringEvalChain(PairwiseStringEvaluator, LLMEvalChain, LLMChain): output_parser (BaseOutputParser): The output parser for the chain. Example: - >>> from langchain_community.chat_models import ChatOpenAI + >>> from langchain_openai import ChatOpenAI >>> from langchain_classic.evaluation.comparison import PairwiseStringEvalChain - >>> llm = ChatOpenAI( + >>> model = ChatOpenAI( ... temperature=0, model_name="gpt-4", model_kwargs={"random_seed": 42} ... ) - >>> chain = PairwiseStringEvalChain.from_llm(llm=llm) + >>> chain = PairwiseStringEvalChain.from_llm(llm=model) >>> result = chain.evaluate_string_pairs( ... input = "What is the chemical formula for water?", ... prediction = "H2O", @@ -188,7 +188,7 @@ class PairwiseStringEvalChain(PairwiseStringEvaluator, LLMEvalChain, LLMChain): """ - output_key: str = "results" #: :meta private: + output_key: str = "results" output_parser: BaseOutputParser = Field( default_factory=PairwiseStringResultOutputParser, ) @@ -207,7 +207,7 @@ class PairwiseStringEvalChain(PairwiseStringEvaluator, LLMEvalChain, LLMChain): """Return whether the chain requires a reference. Returns: - True if the chain requires a reference, False otherwise. + `True` if the chain requires a reference, `False` otherwise. 
""" return False @@ -217,7 +217,7 @@ class PairwiseStringEvalChain(PairwiseStringEvaluator, LLMEvalChain, LLMChain): """Return whether the chain requires an input. Returns: - bool: True if the chain requires an input, False otherwise. + `True` if the chain requires an input, `False` otherwise. """ return True @@ -227,7 +227,7 @@ class PairwiseStringEvalChain(PairwiseStringEvaluator, LLMEvalChain, LLMChain): """Return the warning to show when reference is ignored. Returns: - str: The warning to show when reference is ignored. + The warning to show when reference is ignored. """ return ( @@ -343,7 +343,7 @@ Performance may be significantly worse with other models.", **kwargs: Additional keyword arguments. Returns: - A dictionary containing: + `dict` containing: - reasoning: The reasoning for the preference. - value: The preference value, which is either 'A', 'B', or None for no preference. @@ -389,7 +389,7 @@ Performance may be significantly worse with other models.", **kwargs: Additional keyword arguments. Returns: - A dictionary containing: + `dict` containing: - reasoning: The reasoning for the preference. - value: The preference value, which is either 'A', 'B', or None for no preference. @@ -425,7 +425,7 @@ class LabeledPairwiseStringEvalChain(PairwiseStringEvalChain): """Return whether the chain requires a reference. Returns: - bool: True if the chain requires a reference, False otherwise. + `True` if the chain requires a reference, `False` otherwise. """ return True @@ -442,18 +442,18 @@ class LabeledPairwiseStringEvalChain(PairwiseStringEvalChain): """Initialize the LabeledPairwiseStringEvalChain from an LLM. Args: - llm (BaseLanguageModel): The LLM to use. - prompt (PromptTemplate, optional): The prompt to use. - criteria (Union[CRITERIA_TYPE, str], optional): The criteria to use. - **kwargs (Any): Additional keyword arguments. + llm: The LLM to use. + prompt: The prompt to use. + criteria: The criteria to use. + **kwargs: Additional keyword arguments. Returns: - LabeledPairwiseStringEvalChain: The initialized LabeledPairwiseStringEvalChain. + The initialized `LabeledPairwiseStringEvalChain`. Raises: ValueError: If the input variables are not as expected. - """ # noqa: E501 + """ expected_input_vars = { "prediction", "prediction_b", diff --git a/libs/langchain/langchain_classic/evaluation/criteria/__init__.py b/libs/langchain/langchain_classic/evaluation/criteria/__init__.py index c275541a06c..a551f59dbe0 100644 --- a/libs/langchain/langchain_classic/evaluation/criteria/__init__.py +++ b/libs/langchain/langchain_classic/evaluation/criteria/__init__.py @@ -12,12 +12,12 @@ chain against specified criteria. 
Examples: -------- Using a predefined criterion: ->>> from langchain_community.llms import OpenAI +>>> from langchain_openai import OpenAI >>> from langchain_classic.evaluation.criteria import CriteriaEvalChain ->>> llm = OpenAI() +>>> model = OpenAI() >>> criteria = "conciseness" ->>> chain = CriteriaEvalChain.from_llm(llm=llm, criteria=criteria) +>>> chain = CriteriaEvalChain.from_llm(llm=model, criteria=criteria) >>> chain.evaluate_strings( prediction="The answer is 42.", reference="42", @@ -26,10 +26,10 @@ Using a predefined criterion: Using a custom criterion: ->>> from langchain_community.llms import OpenAI +>>> from langchain_openai import OpenAI >>> from langchain_classic.evaluation.criteria import LabeledCriteriaEvalChain ->>> llm = OpenAI() +>>> model = OpenAI() >>> criteria = { "hallucination": ( "Does this submission contain information" @@ -37,7 +37,7 @@ Using a custom criterion: ), } >>> chain = LabeledCriteriaEvalChain.from_llm( - llm=llm, + llm=model, criteria=criteria, ) >>> chain.evaluate_strings( diff --git a/libs/langchain/langchain_classic/evaluation/criteria/eval_chain.py b/libs/langchain/langchain_classic/evaluation/criteria/eval_chain.py index 80d964c451f..5d90659caa7 100644 --- a/libs/langchain/langchain_classic/evaluation/criteria/eval_chain.py +++ b/libs/langchain/langchain_classic/evaluation/criteria/eval_chain.py @@ -190,9 +190,9 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain): -------- >>> from langchain_anthropic import ChatAnthropic >>> from langchain_classic.evaluation.criteria import CriteriaEvalChain - >>> llm = ChatAnthropic(temperature=0) + >>> model = ChatAnthropic(temperature=0) >>> criteria = {"my-custom-criterion": "Is the submission the most amazing ever?"} - >>> evaluator = CriteriaEvalChain.from_llm(llm=llm, criteria=criteria) + >>> evaluator = CriteriaEvalChain.from_llm(llm=model, criteria=criteria) >>> evaluator.evaluate_strings( ... prediction="Imagine an ice cream flavor for the color aquamarine", ... input="Tell me an idea", @@ -205,10 +205,10 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain): >>> from langchain_openai import ChatOpenAI >>> from langchain_classic.evaluation.criteria import LabeledCriteriaEvalChain - >>> llm = ChatOpenAI(model="gpt-4", temperature=0) + >>> model = ChatOpenAI(model="gpt-4", temperature=0) >>> criteria = "correctness" >>> evaluator = LabeledCriteriaEvalChain.from_llm( - ... llm=llm, + ... llm=model, ... criteria=criteria, ... 
) >>> evaluator.evaluate_strings( @@ -228,7 +228,7 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain): """The parser to use to map the output to a structured result.""" criterion_name: str """The name of the criterion being evaluated.""" - output_key: str = "results" #: :meta private: + output_key: str = "results" @classmethod @override @@ -347,7 +347,7 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain): -------- >>> from langchain_openai import OpenAI >>> from langchain_classic.evaluation.criteria import LabeledCriteriaEvalChain - >>> llm = OpenAI() + >>> model = OpenAI() >>> criteria = { "hallucination": ( "Does this submission contain information" @@ -355,7 +355,7 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain): ), } >>> chain = LabeledCriteriaEvalChain.from_llm( - llm=llm, + llm=model, criteria=criteria, ) """ @@ -433,9 +433,9 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain): Examples: >>> from langchain_openai import OpenAI >>> from langchain_classic.evaluation.criteria import CriteriaEvalChain - >>> llm = OpenAI() + >>> model = OpenAI() >>> criteria = "conciseness" - >>> chain = CriteriaEvalChain.from_llm(llm=llm, criteria=criteria) + >>> chain = CriteriaEvalChain.from_llm(llm=model, criteria=criteria) >>> chain.evaluate_strings( prediction="The answer is 42.", reference="42", @@ -485,9 +485,9 @@ class CriteriaEvalChain(StringEvaluator, LLMEvalChain, LLMChain): Examples: >>> from langchain_openai import OpenAI >>> from langchain_classic.evaluation.criteria import CriteriaEvalChain - >>> llm = OpenAI() + >>> model = OpenAI() >>> criteria = "conciseness" - >>> chain = CriteriaEvalChain.from_llm(llm=llm, criteria=criteria) + >>> chain = CriteriaEvalChain.from_llm(llm=model, criteria=criteria) >>> await chain.aevaluate_strings( prediction="The answer is 42.", reference="42", @@ -569,7 +569,7 @@ class LabeledCriteriaEvalChain(CriteriaEvalChain): -------- >>> from langchain_openai import OpenAI >>> from langchain_classic.evaluation.criteria import LabeledCriteriaEvalChain - >>> llm = OpenAI() + >>> model = OpenAI() >>> criteria = { "hallucination": ( "Does this submission contain information" @@ -577,7 +577,7 @@ class LabeledCriteriaEvalChain(CriteriaEvalChain): ), } >>> chain = LabeledCriteriaEvalChain.from_llm( - llm=llm, + llm=model, criteria=criteria, ) """ diff --git a/libs/langchain/langchain_classic/evaluation/embedding_distance/base.py b/libs/langchain/langchain_classic/evaluation/embedding_distance/base.py index 58b7f032ff2..c7c0e320dbe 100644 --- a/libs/langchain/langchain_classic/evaluation/embedding_distance/base.py +++ b/libs/langchain/langchain_classic/evaluation/embedding_distance/base.py @@ -48,10 +48,10 @@ def _check_numpy() -> bool: def _embedding_factory() -> Embeddings: - """Create an Embeddings object. + """Create an `Embeddings` object. Returns: - Embeddings: The created Embeddings object. + The created `Embeddings` object. """ # Here for backwards compatibility. # Generally, we do not want to be seeing imports from langchain community @@ -94,9 +94,8 @@ class _EmbeddingDistanceChainMixin(Chain): """Shared functionality for embedding distance evaluators. Attributes: - embeddings (Embeddings): The embedding objects to vectorize the outputs. - distance_metric (EmbeddingDistance): The distance metric to use - for comparing the embeddings. + embeddings: The embedding objects to vectorize the outputs. + distance_metric: The distance metric to use for comparing the embeddings. 
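For context on the `_EmbeddingDistanceChainMixin` docstrings being touched here, a small sketch of the string evaluator built on it; the embedding model is illustrative and any `Embeddings` implementation should work:

```python
from langchain_classic.evaluation.embedding_distance.base import (
    EmbeddingDistanceEvalChain,
)
from langchain_openai import OpenAIEmbeddings

chain = EmbeddingDistanceEvalChain(embeddings=OpenAIEmbeddings())
chain.evaluate_strings(prediction="I shall go", reference="I will go")
# -> {"score": <cosine distance by default>}
```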
""" embeddings: Embeddings = Field(default_factory=_embedding_factory) @@ -107,10 +106,10 @@ class _EmbeddingDistanceChainMixin(Chain): """Validate that the TikTok library is installed. Args: - values (Dict[str, Any]): The values to validate. + values: The values to validate. Returns: - Dict[str, Any]: The validated values. + The validated values. """ embeddings = values.get("embeddings") types_ = [] @@ -159,7 +158,7 @@ class _EmbeddingDistanceChainMixin(Chain): """Return the output keys of the chain. Returns: - List[str]: The output keys. + The output keys. """ return ["score"] @@ -173,10 +172,10 @@ class _EmbeddingDistanceChainMixin(Chain): """Get the metric function for the given metric name. Args: - metric (EmbeddingDistance): The metric name. + metric: The metric name. Returns: - Any: The metric function. + The metric function. """ metrics = { EmbeddingDistance.COSINE: self._cosine_distance, @@ -334,7 +333,7 @@ class _EmbeddingDistanceChainMixin(Chain): vectors (np.ndarray): The input vectors. Returns: - float: The computed score. + The computed score. """ metric = self._get_metric(self.distance_metric) if _check_numpy() and isinstance(vectors, _import_numpy().ndarray): @@ -362,7 +361,7 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator): """Return whether the chain requires a reference. Returns: - bool: True if a reference is required, False otherwise. + True if a reference is required, `False` otherwise. """ return True @@ -376,7 +375,7 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator): """Return the input keys of the chain. Returns: - List[str]: The input keys. + The input keys. """ return ["prediction", "reference"] @@ -393,7 +392,7 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator): run_manager: The callback manager. Returns: - Dict[str, Any]: The computed score. + The computed score. """ vectors = self.embeddings.embed_documents( [inputs["prediction"], inputs["reference"]], @@ -413,12 +412,11 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator): """Asynchronously compute the score for a prediction and reference. Args: - inputs (Dict[str, Any]): The input data. - run_manager (AsyncCallbackManagerForChainRun, optional): - The callback manager. + inputs: The input data. + run_manager: The callback manager. Returns: - Dict[str, Any]: The computed score. + The computed score. """ vectors = await self.embeddings.aembed_documents( [ @@ -456,7 +454,7 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator): **kwargs: Additional keyword arguments. Returns: - A dictionary containing: + `dict` containing: - score: The embedding distance between the two predictions. """ result = self( @@ -492,7 +490,7 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator): **kwargs: Additional keyword arguments. Returns: - A dictionary containing: + `dict` containing: - score: The embedding distance between the two predictions. """ result = await self.acall( @@ -523,7 +521,7 @@ class PairwiseEmbeddingDistanceEvalChain( """Return the input keys of the chain. Returns: - List[str]: The input keys. + The input keys. """ return ["prediction", "prediction_b"] @@ -541,12 +539,11 @@ class PairwiseEmbeddingDistanceEvalChain( """Compute the score for two predictions. Args: - inputs (Dict[str, Any]): The input data. - run_manager (CallbackManagerForChainRun, optional): - The callback manager. + inputs: The input data. 
+ run_manager: The callback manager. Returns: - Dict[str, Any]: The computed score. + The computed score. """ vectors = self.embeddings.embed_documents( [ @@ -569,12 +566,11 @@ class PairwiseEmbeddingDistanceEvalChain( """Asynchronously compute the score for two predictions. Args: - inputs (Dict[str, Any]): The input data. - run_manager (AsyncCallbackManagerForChainRun, optional): - The callback manager. + inputs: The input data. + run_manager: The callback manager. Returns: - Dict[str, Any]: The computed score. + The computed score. """ vectors = await self.embeddings.aembed_documents( [ @@ -612,7 +608,7 @@ class PairwiseEmbeddingDistanceEvalChain( **kwargs: Additional keyword arguments. Returns: - A dictionary containing: + `dict` containing: - score: The embedding distance between the two predictions. """ result = self( @@ -648,8 +644,8 @@ class PairwiseEmbeddingDistanceEvalChain( **kwargs: Additional keyword arguments. Returns: - A dictionary containing: - - score: The embedding distance between the two predictions. + `dict` containing: + - score: The embedding distance between the two predictions. """ result = await self.acall( inputs={"prediction": prediction, "prediction_b": prediction_b}, diff --git a/libs/langchain/langchain_classic/evaluation/exact_match/base.py b/libs/langchain/langchain_classic/evaluation/exact_match/base.py index b555d932b78..05f0202a316 100644 --- a/libs/langchain/langchain_classic/evaluation/exact_match/base.py +++ b/libs/langchain/langchain_classic/evaluation/exact_match/base.py @@ -31,15 +31,12 @@ class ExactMatchStringEvaluator(StringEvaluator): ignore_numbers: bool = False, **_: Any, ): - """Initialize the ExactMatchStringEvaluator. + """Initialize the `ExactMatchStringEvaluator`. Args: ignore_case: Whether to ignore case when comparing strings. - Defaults to `False`. ignore_punctuation: Whether to ignore punctuation when comparing strings. - Defaults to `False`. ignore_numbers: Whether to ignore numbers when comparing strings. - Defaults to `False`. """ super().__init__() self.ignore_case = ignore_case @@ -61,7 +58,7 @@ class ExactMatchStringEvaluator(StringEvaluator): """Get the input keys. Returns: - List[str]: The input keys. + The input keys. """ return ["reference", "prediction"] @@ -70,7 +67,7 @@ class ExactMatchStringEvaluator(StringEvaluator): """Get the evaluation name. Returns: - str: The evaluation name. + The evaluation name. """ return "exact_match" diff --git a/libs/langchain/langchain_classic/evaluation/parsing/base.py b/libs/langchain/langchain_classic/evaluation/parsing/base.py index 98141790096..bed2e6e3789 100644 --- a/libs/langchain/langchain_classic/evaluation/parsing/base.py +++ b/libs/langchain/langchain_classic/evaluation/parsing/base.py @@ -71,10 +71,11 @@ class JsonValidityEvaluator(StringEvaluator): **kwargs: Additional keyword arguments (not used). Returns: - dict: A dictionary containing the evaluation score. The score is 1 if - the prediction is valid JSON, and 0 otherwise. + `dict` containing the evaluation score. The score is `1` if + the prediction is valid JSON, and `0` otherwise. + If the prediction is not valid JSON, the dictionary also contains - a "reasoning" field with the error message. + a `reasoning` field with the error message. """ try: @@ -168,7 +169,7 @@ class JsonEqualityEvaluator(StringEvaluator): **kwargs: Additional keyword arguments (not used). Returns: - A dictionary containing the evaluation score. + `dict` containing the evaluation score. 
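As a usage note for the parsing evaluators whose docstrings are reworded here, a brief sketch with the expected shape of the outputs shown as comments:

```python
from langchain_classic.evaluation.parsing.base import (
    JsonEqualityEvaluator,
    JsonValidityEvaluator,
)

validity = JsonValidityEvaluator()
validity.evaluate_strings(prediction='{"a": 1}')
# -> {"score": 1}
validity.evaluate_strings(prediction='{"a": 1,}')
# -> {"score": 0, "reasoning": "..."}  (parser error message)

equality = JsonEqualityEvaluator()
equality.evaluate_strings(prediction='{"a": 1}', reference='{"a": 1}')
# -> {"score": True}
```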
""" parsed = self._parse_json(prediction) label = self._parse_json(cast("str", reference)) diff --git a/libs/langchain/langchain_classic/evaluation/parsing/json_distance.py b/libs/langchain/langchain_classic/evaluation/parsing/json_distance.py index b1167cd40cb..057b8d4e1f4 100644 --- a/libs/langchain/langchain_classic/evaluation/parsing/json_distance.py +++ b/libs/langchain/langchain_classic/evaluation/parsing/json_distance.py @@ -50,7 +50,7 @@ class JsonEditDistanceEvaluator(StringEvaluator): Raises: ImportError: If the `rapidfuzz` package is not installed and no - `string_distance` function is provided. + `string_distance` function is provided. """ super().__init__() if string_distance is not None: diff --git a/libs/langchain/langchain_classic/evaluation/qa/eval_chain.py b/libs/langchain/langchain_classic/evaluation/qa/eval_chain.py index 189a2f53212..f87cb1b7488 100644 --- a/libs/langchain/langchain_classic/evaluation/qa/eval_chain.py +++ b/libs/langchain/langchain_classic/evaluation/qa/eval_chain.py @@ -77,7 +77,7 @@ def _parse_string_eval_output(text: str) -> dict: class QAEvalChain(LLMChain, StringEvaluator, LLMEvalChain): """LLM Chain for evaluating question answering.""" - output_key: str = "results" #: :meta private: + output_key: str = "results" model_config = ConfigDict( extra="ignore", @@ -113,17 +113,16 @@ class QAEvalChain(LLMChain, StringEvaluator, LLMEvalChain): """Load QA Eval Chain from LLM. Args: - llm (BaseLanguageModel): the base language model to use. + llm: The base language model to use. + prompt: A prompt template containing the input_variables: + `'input'`, `'answer'` and `'result'` that will be used as the prompt + for evaluation. - prompt (PromptTemplate): A prompt template containing the input_variables: - 'input', 'answer' and 'result' that will be used as the prompt - for evaluation. - Defaults to PROMPT. - - **kwargs: additional keyword arguments. + Defaults to `PROMPT`. + **kwargs: Additional keyword arguments. Returns: - QAEvalChain: the loaded QA eval chain. + The loaded QA eval chain. """ prompt = prompt or PROMPT expected_input_vars = {"query", "answer", "result"} @@ -264,17 +263,16 @@ class ContextQAEvalChain(LLMChain, StringEvaluator, LLMEvalChain): """Load QA Eval Chain from LLM. Args: - llm (BaseLanguageModel): the base language model to use. + llm: The base language model to use. + prompt: A prompt template containing the `input_variables`: + `'query'`, `'context'` and `'result'` that will be used as the prompt + for evaluation. - prompt (PromptTemplate): A prompt template containing the input_variables: - 'query', 'context' and 'result' that will be used as the prompt - for evaluation. - Defaults to PROMPT. - - **kwargs: additional keyword arguments. + Defaults to `PROMPT`. + **kwargs: Additional keyword arguments. Returns: - ContextQAEvalChain: the loaded QA eval chain. + The loaded QA eval chain. """ prompt = prompt or CONTEXT_PROMPT cls._validate_input_vars(prompt) diff --git a/libs/langchain/langchain_classic/evaluation/regex_match/base.py b/libs/langchain/langchain_classic/evaluation/regex_match/base.py index 23afa56ae4d..cda1ecee143 100644 --- a/libs/langchain/langchain_classic/evaluation/regex_match/base.py +++ b/libs/langchain/langchain_classic/evaluation/regex_match/base.py @@ -33,7 +33,7 @@ class RegexMatchStringEvaluator(StringEvaluator): """Initialize the RegexMatchStringEvaluator. Args: - flags: Flags to use for the regex match. Defaults to 0 (no flags). + flags: Flags to use for the regex match. Defaults to no flags. 
""" super().__init__() self.flags = flags @@ -53,7 +53,7 @@ class RegexMatchStringEvaluator(StringEvaluator): """Get the input keys. Returns: - List[str]: The input keys. + The input keys. """ return ["reference", "prediction"] @@ -62,7 +62,7 @@ class RegexMatchStringEvaluator(StringEvaluator): """Get the evaluation name. Returns: - str: The evaluation name. + The evaluation name. """ return "regex_match" diff --git a/libs/langchain/langchain_classic/evaluation/schema.py b/libs/langchain/langchain_classic/evaluation/schema.py index 25d2b0b028f..d8e598846d4 100644 --- a/libs/langchain/langchain_classic/evaluation/schema.py +++ b/libs/langchain/langchain_classic/evaluation/schema.py @@ -114,8 +114,8 @@ class _EvalArgsMixin: """Check if the evaluation arguments are valid. Args: - reference (str | None, optional): The reference label. - input_ (str | None, optional): The input string. + reference: The reference label. + input_: The input string. Raises: ValueError: If the evaluator requires an input string but none is provided, @@ -162,17 +162,17 @@ class StringEvaluator(_EvalArgsMixin, ABC): """Evaluate Chain or LLM output, based on optional input and label. Args: - prediction (str): The LLM or chain prediction to evaluate. - reference (str | None, optional): The reference label to evaluate against. - input (str | None, optional): The input to consider during evaluation. - kwargs: Additional keyword arguments, including callbacks, tags, etc. + prediction: The LLM or chain prediction to evaluate. + reference: The reference label to evaluate against. + input: The input to consider during evaluation. + **kwargs: Additional keyword arguments, including callbacks, tags, etc. Returns: - dict: The evaluation results containing the score or value. - It is recommended that the dictionary contain the following keys: - - score: the score of the evaluation, if applicable. - - value: the string value of the evaluation, if applicable. - - reasoning: the reasoning for the evaluation, if applicable. + The evaluation results containing the score or value. + It is recommended that the dictionary contain the following keys: + - score: the score of the evaluation, if applicable. + - value: the string value of the evaluation, if applicable. + - reasoning: the reasoning for the evaluation, if applicable. """ async def _aevaluate_strings( @@ -186,17 +186,17 @@ class StringEvaluator(_EvalArgsMixin, ABC): """Asynchronously evaluate Chain or LLM output, based on optional input and label. Args: - prediction (str): The LLM or chain prediction to evaluate. - reference (str | None, optional): The reference label to evaluate against. - input (str | None, optional): The input to consider during evaluation. - kwargs: Additional keyword arguments, including callbacks, tags, etc. + prediction: The LLM or chain prediction to evaluate. + reference: The reference label to evaluate against. + input: The input to consider during evaluation. + **kwargs: Additional keyword arguments, including callbacks, tags, etc. Returns: - dict: The evaluation results containing the score or value. - It is recommended that the dictionary contain the following keys: - - score: the score of the evaluation, if applicable. - - value: the string value of the evaluation, if applicable. - - reasoning: the reasoning for the evaluation, if applicable. + The evaluation results containing the score or value. + It is recommended that the dictionary contain the following keys: + - score: the score of the evaluation, if applicable. 
+ - value: the string value of the evaluation, if applicable. + - reasoning: the reasoning for the evaluation, if applicable. """ # noqa: E501 return await run_in_executor( None, @@ -218,13 +218,13 @@ class StringEvaluator(_EvalArgsMixin, ABC): """Evaluate Chain or LLM output, based on optional input and label. Args: - prediction (str): The LLM or chain prediction to evaluate. - reference (str | None, optional): The reference label to evaluate against. - input (str | None, optional): The input to consider during evaluation. - kwargs: Additional keyword arguments, including callbacks, tags, etc. + prediction: The LLM or chain prediction to evaluate. + reference: The reference label to evaluate against. + input: The input to consider during evaluation. + **kwargs: Additional keyword arguments, including callbacks, tags, etc. Returns: - dict: The evaluation results containing the score or value. + The evaluation results containing the score or value. """ self._check_evaluation_args(reference=reference, input_=input) return self._evaluate_strings( @@ -245,13 +245,13 @@ class StringEvaluator(_EvalArgsMixin, ABC): """Asynchronously evaluate Chain or LLM output, based on optional input and label. Args: - prediction (str): The LLM or chain prediction to evaluate. - reference (str | None, optional): The reference label to evaluate against. - input (str | None, optional): The input to consider during evaluation. - kwargs: Additional keyword arguments, including callbacks, tags, etc. + prediction: The LLM or chain prediction to evaluate. + reference: The reference label to evaluate against. + input: The input to consider during evaluation. + **kwargs: Additional keyword arguments, including callbacks, tags, etc. Returns: - dict: The evaluation results containing the score or value. + The evaluation results containing the score or value. """ # noqa: E501 self._check_evaluation_args(reference=reference, input_=input) return await self._aevaluate_strings( @@ -278,14 +278,14 @@ class PairwiseStringEvaluator(_EvalArgsMixin, ABC): """Evaluate the output string pairs. Args: - prediction (str): The output string from the first model. - prediction_b (str): The output string from the second model. - reference (str | None, optional): The expected output / reference string. - input (str | None, optional): The input string. - kwargs: Additional keyword arguments, such as callbacks and optional reference strings. + prediction: The output string from the first model. + prediction_b: The output string from the second model. + reference: The expected output / reference string. + input: The input string. + **kwargs: Additional keyword arguments, such as callbacks and optional reference strings. Returns: - dict: A dictionary containing the preference, scores, and/or other information. + `dict` containing the preference, scores, and/or other information. """ # noqa: E501 async def _aevaluate_string_pairs( @@ -300,14 +300,14 @@ class PairwiseStringEvaluator(_EvalArgsMixin, ABC): """Asynchronously evaluate the output string pairs. Args: - prediction (str): The output string from the first model. - prediction_b (str): The output string from the second model. - reference (str | None, optional): The expected output / reference string. - input (str | None, optional): The input string. - kwargs: Additional keyword arguments, such as callbacks and optional reference strings. + prediction: The output string from the first model. + prediction_b: The output string from the second model. 
+ reference: The expected output / reference string. + input: The input string. + **kwargs: Additional keyword arguments, such as callbacks and optional reference strings. Returns: - dict: A dictionary containing the preference, scores, and/or other information. + `dict` containing the preference, scores, and/or other information. """ # noqa: E501 return await run_in_executor( None, @@ -331,14 +331,14 @@ class PairwiseStringEvaluator(_EvalArgsMixin, ABC): """Evaluate the output string pairs. Args: - prediction (str): The output string from the first model. - prediction_b (str): The output string from the second model. - reference (str | None, optional): The expected output / reference string. - input (str | None, optional): The input string. - kwargs: Additional keyword arguments, such as callbacks and optional reference strings. + prediction: The output string from the first model. + prediction_b: The output string from the second model. + reference: The expected output / reference string. + input: The input string. + **kwargs: Additional keyword arguments, such as callbacks and optional reference strings. Returns: - dict: A dictionary containing the preference, scores, and/or other information. + `dict` containing the preference, scores, and/or other information. """ # noqa: E501 self._check_evaluation_args(reference=reference, input_=input) return self._evaluate_string_pairs( @@ -361,14 +361,14 @@ class PairwiseStringEvaluator(_EvalArgsMixin, ABC): """Asynchronously evaluate the output string pairs. Args: - prediction (str): The output string from the first model. - prediction_b (str): The output string from the second model. - reference (str | None, optional): The expected output / reference string. - input (str | None, optional): The input string. - kwargs: Additional keyword arguments, such as callbacks and optional reference strings. + prediction: The output string from the first model. + prediction_b: The output string from the second model. + reference: The expected output / reference string. + input: The input string. + **kwargs: Additional keyword arguments, such as callbacks and optional reference strings. Returns: - dict: A dictionary containing the preference, scores, and/or other information. + `dict` containing the preference, scores, and/or other information. """ # noqa: E501 self._check_evaluation_args(reference=reference, input_=input) return await self._aevaluate_string_pairs( diff --git a/libs/langchain/langchain_classic/evaluation/scoring/__init__.py b/libs/langchain/langchain_classic/evaluation/scoring/__init__.py index 4faaefb278a..5527d530e4a 100644 --- a/libs/langchain/langchain_classic/evaluation/scoring/__init__.py +++ b/libs/langchain/langchain_classic/evaluation/scoring/__init__.py @@ -5,10 +5,10 @@ be they LLMs, Chains, or otherwise. This can be based on a variety of criteria and or a reference answer. Example: - >>> from langchain_community.chat_models import ChatOpenAI + >>> from langchain_openai import ChatOpenAI >>> from langchain_classic.evaluation.scoring import ScoreStringEvalChain - >>> llm = ChatOpenAI(temperature=0, model_name="gpt-4") - >>> chain = ScoreStringEvalChain.from_llm(llm=llm) + >>> model = ChatOpenAI(temperature=0, model_name="gpt-4") + >>> chain = ScoreStringEvalChain.from_llm(llm=model) >>> result = chain.evaluate_strings( ... input="What is the chemical formula for water?", ... 
prediction="H2O", diff --git a/libs/langchain/langchain_classic/evaluation/scoring/eval_chain.py b/libs/langchain/langchain_classic/evaluation/scoring/eval_chain.py index cbc01d75bdf..f5ac405ac83 100644 --- a/libs/langchain/langchain_classic/evaluation/scoring/eval_chain.py +++ b/libs/langchain/langchain_classic/evaluation/scoring/eval_chain.py @@ -56,10 +56,10 @@ def resolve_criteria( """Resolve the criteria for the pairwise evaluator. Args: - criteria (Union[CRITERIA_TYPE, str], optional): The criteria to use. + criteria: The criteria to use. Returns: - dict: The resolved criteria. + The resolved criteria. """ if criteria is None: @@ -154,10 +154,10 @@ class ScoreStringEvalChain(StringEvaluator, LLMEvalChain, LLMChain): output_parser (BaseOutputParser): The output parser for the chain. Example: - >>> from langchain_community.chat_models import ChatOpenAI + >>> from langchain_openai import ChatOpenAI >>> from langchain_classic.evaluation.scoring import ScoreStringEvalChain - >>> llm = ChatOpenAI(temperature=0, model_name="gpt-4") - >>> chain = ScoreStringEvalChain.from_llm(llm=llm) + >>> model = ChatOpenAI(temperature=0, model_name="gpt-4") + >>> chain = ScoreStringEvalChain.from_llm(llm=model) >>> result = chain.evaluate_strings( ... input="What is the chemical formula for water?", ... prediction="H2O", @@ -173,7 +173,7 @@ class ScoreStringEvalChain(StringEvaluator, LLMEvalChain, LLMChain): """ - output_key: str = "results" #: :meta private: + output_key: str = "results" output_parser: BaseOutputParser = Field( default_factory=ScoreStringResultOutputParser, ) @@ -196,7 +196,7 @@ class ScoreStringEvalChain(StringEvaluator, LLMEvalChain, LLMChain): """Return whether the chain requires a reference. Returns: - bool: True if the chain requires a reference, False otherwise. + `True` if the chain requires a reference, `False` otherwise. """ return False @@ -206,7 +206,7 @@ class ScoreStringEvalChain(StringEvaluator, LLMEvalChain, LLMChain): """Return whether the chain requires an input. Returns: - bool: True if the chain requires an input, False otherwise. + `True` if the chain requires an input, `False` otherwise. """ return True @@ -227,7 +227,7 @@ class ScoreStringEvalChain(StringEvaluator, LLMEvalChain, LLMChain): """Return the warning to show when reference is ignored. Returns: - str: The warning to show when reference is ignored. + The warning to show when reference is ignored. """ return ( @@ -354,7 +354,7 @@ Performance may be significantly worse with other models.", **kwargs: Additional keyword arguments. Returns: - A dictionary containing: + `dict` containing: - reasoning: The reasoning for the preference. - score: A score between 1 and 10. @@ -395,7 +395,7 @@ Performance may be significantly worse with other models.", **kwargs: Additional keyword arguments. Returns: - A dictionary containing: + `dict` containing: - reasoning: The reasoning for the preference. - score: A score between 1 and 10. @@ -424,7 +424,7 @@ class LabeledScoreStringEvalChain(ScoreStringEvalChain): """Return whether the chain requires a reference. Returns: - bool: True if the chain requires a reference, False otherwise. + `True` if the chain requires a reference, `False` otherwise. """ return True @@ -442,14 +442,14 @@ class LabeledScoreStringEvalChain(ScoreStringEvalChain): """Initialize the LabeledScoreStringEvalChain from an LLM. Args: - llm (BaseLanguageModel): The LLM to use. - prompt (PromptTemplate, optional): The prompt to use. - criteria (Union[CRITERIA_TYPE, str], optional): The criteria to use. 
- normalize_by (float, optional): The value to normalize the score by. - **kwargs (Any): Additional keyword arguments. + llm: The LLM to use. + prompt: The prompt to use. + criteria: The criteria to use. + normalize_by: The value to normalize the score by. + **kwargs: Additional keyword arguments. Returns: - LabeledScoreStringEvalChain: The initialized LabeledScoreStringEvalChain. + The initialized LabeledScoreStringEvalChain. Raises: ValueError: If the input variables are not as expected. diff --git a/libs/langchain/langchain_classic/evaluation/string_distance/base.py b/libs/langchain/langchain_classic/evaluation/string_distance/base.py index e8d5c06fa9a..88e91cffefe 100644 --- a/libs/langchain/langchain_classic/evaluation/string_distance/base.py +++ b/libs/langchain/langchain_classic/evaluation/string_distance/base.py @@ -25,7 +25,7 @@ def _load_rapidfuzz() -> Any: ImportError: If the rapidfuzz library is not installed. Returns: - Any: The rapidfuzz.distance module. + The `rapidfuzz.distance` module. """ try: import rapidfuzz @@ -42,12 +42,12 @@ class StringDistance(str, Enum): """Distance metric to use. Attributes: - DAMERAU_LEVENSHTEIN: The Damerau-Levenshtein distance. - LEVENSHTEIN: The Levenshtein distance. - JARO: The Jaro distance. - JARO_WINKLER: The Jaro-Winkler distance. - HAMMING: The Hamming distance. - INDEL: The Indel distance. + `DAMERAU_LEVENSHTEIN`: The Damerau-Levenshtein distance. + `LEVENSHTEIN`: The Levenshtein distance. + `JARO`: The Jaro distance. + `JARO_WINKLER`: The Jaro-Winkler distance. + `HAMMING`: The Hamming distance. + `INDEL`: The Indel distance. """ DAMERAU_LEVENSHTEIN = "damerau_levenshtein" @@ -63,7 +63,7 @@ class _RapidFuzzChainMixin(Chain): distance: StringDistance = Field(default=StringDistance.JARO_WINKLER) normalize_score: bool = Field(default=True) - """Whether to normalize the score to a value between 0 and 1. + """Whether to normalize the score to a value between `0` and `1`. Applies only to the Levenshtein and Damerau-Levenshtein distances.""" @pre_init @@ -71,10 +71,10 @@ class _RapidFuzzChainMixin(Chain): """Validate that the rapidfuzz library is installed. Args: - values (Dict[str, Any]): The input values. + values: The input values. Returns: - Dict[str, Any]: The validated values. + The validated values. """ _load_rapidfuzz() return values @@ -84,7 +84,7 @@ class _RapidFuzzChainMixin(Chain): """Get the output keys. Returns: - List[str]: The output keys. + The output keys. """ return ["score"] @@ -92,10 +92,10 @@ class _RapidFuzzChainMixin(Chain): """Prepare the output dictionary. Args: - result (Dict[str, Any]): The evaluation results. + result: The evaluation results. Returns: - Dict[str, Any]: The prepared output dictionary. + The prepared output dictionary. """ result = {"score": result["score"]} if RUN_KEY in result: @@ -111,7 +111,7 @@ class _RapidFuzzChainMixin(Chain): normalize_score: Whether to normalize the score. Returns: - Callable: The distance metric function. + The distance metric function. Raises: ValueError: If the distance metric is invalid. @@ -142,7 +142,7 @@ class _RapidFuzzChainMixin(Chain): """Get the distance metric function. Returns: - Callable: The distance metric function. + The distance metric function. """ return _RapidFuzzChainMixin._get_metric( self.distance, @@ -199,7 +199,7 @@ class StringDistanceEvalChain(StringEvaluator, _RapidFuzzChainMixin): """Get the input keys. Returns: - List[str]: The input keys. + The input keys. 
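For the string-distance evaluators touched here, a minimal sketch; it requires the `rapidfuzz` dependency mentioned above, and the example strings are illustrative:

```python
from langchain_classic.evaluation.string_distance.base import (
    StringDistance,
    StringDistanceEvalChain,
)

chain = StringDistanceEvalChain(distance=StringDistance.LEVENSHTEIN)
chain.evaluate_strings(prediction="The job is done.", reference="The job is done")
# -> {"score": <normalized Levenshtein distance; 0 means identical strings>}
```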
""" return ["reference", "prediction"] @@ -208,7 +208,7 @@ class StringDistanceEvalChain(StringEvaluator, _RapidFuzzChainMixin): """Get the evaluation name. Returns: - str: The evaluation name. + The evaluation name. """ return f"{self.distance.value}_distance" @@ -330,7 +330,7 @@ class PairwiseStringDistanceEvalChain(PairwiseStringEvaluator, _RapidFuzzChainMi """Get the input keys. Returns: - List[str]: The input keys. + The input keys. """ return ["prediction", "prediction_b"] @@ -339,7 +339,7 @@ class PairwiseStringDistanceEvalChain(PairwiseStringEvaluator, _RapidFuzzChainMi """Get the evaluation name. Returns: - str: The evaluation name. + The evaluation name. """ return f"pairwise_{self.distance.value}_distance" @@ -352,12 +352,11 @@ class PairwiseStringDistanceEvalChain(PairwiseStringEvaluator, _RapidFuzzChainMi """Compute the string distance between two predictions. Args: - inputs (Dict[str, Any]): The input values. - run_manager (CallbackManagerForChainRun , optional): - The callback manager. + inputs: The input values. + run_manager: The callback manager. Returns: - Dict[str, Any]: The evaluation results containing the score. + The evaluation results containing the score. """ return { "score": self.compute_metric(inputs["prediction"], inputs["prediction_b"]), @@ -372,12 +371,11 @@ class PairwiseStringDistanceEvalChain(PairwiseStringEvaluator, _RapidFuzzChainMi """Asynchronously compute the string distance between two predictions. Args: - inputs (Dict[str, Any]): The input values. - run_manager (AsyncCallbackManagerForChainRun , optional): - The callback manager. + inputs: The input values. + run_manager: The callback manager. Returns: - Dict[str, Any]: The evaluation results containing the score. + The evaluation results containing the score. """ return { "score": self.compute_metric(inputs["prediction"], inputs["prediction_b"]), diff --git a/libs/langchain/langchain_classic/hub.py b/libs/langchain/langchain_classic/hub.py index 7abc310ab45..f23887a4546 100644 --- a/libs/langchain/langchain_classic/hub.py +++ b/libs/langchain/langchain_classic/hub.py @@ -15,6 +15,21 @@ def _get_client( api_key: str | None = None, api_url: str | None = None, ) -> Any: + """Get a client for interacting with the LangChain Hub. + + Attempts to use LangSmith client if available, otherwise falls back to + the legacy `langchainhub` client. + + Args: + api_key: API key to authenticate with the LangChain Hub API. + api_url: URL of the LangChain Hub API. + + Returns: + Client instance for interacting with the hub. + + Raises: + ImportError: If neither `langsmith` nor `langchainhub` can be imported. + """ try: from langsmith import Client as LangSmithClient @@ -51,18 +66,22 @@ def push( ) -> str: """Push an object to the hub and returns the URL it can be viewed at in a browser. - :param repo_full_name: The full name of the prompt to push to in the format of - `owner/prompt_name` or `prompt_name`. - :param object: The LangChain to serialize and push to the hub. - :param api_url: The URL of the LangChain Hub API. Defaults to the hosted API service - if you have an api key set, or a localhost instance if not. - :param api_key: The API key to use to authenticate with the LangChain Hub API. - :param parent_commit_hash: The commit hash of the parent commit to push to. Defaults - to the latest commit automatically. - :param new_repo_is_public: Whether the prompt should be public. Defaults to - False (Private by default). - :param new_repo_description: The description of the prompt. Defaults to an empty - string. 
+ Args: + repo_full_name: The full name of the prompt to push to in the format of + `owner/prompt_name` or `prompt_name`. + object: The LangChain object to serialize and push to the hub. + api_url: The URL of the LangChain Hub API. Defaults to the hosted API service + if you have an API key set, or a localhost instance if not. + api_key: The API key to use to authenticate with the LangChain Hub API. + parent_commit_hash: The commit hash of the parent commit to push to. Defaults + to the latest commit automatically. + new_repo_is_public: Whether the prompt should be public. + new_repo_description: The description of the prompt. + readme: README content for the repository. + tags: Tags to associate with the prompt. + + Returns: + URL where the pushed object can be viewed in a browser. """ client = _get_client(api_key=api_key, api_url=api_url) @@ -98,12 +117,17 @@ def pull( ) -> Any: """Pull an object from the hub and returns it as a LangChain object. - :param owner_repo_commit: The full name of the prompt to pull from in the format of - `owner/prompt_name:commit_hash` or `owner/prompt_name` - or just `prompt_name` if it's your own prompt. - :param api_url: The URL of the LangChain Hub API. Defaults to the hosted API service - if you have an api key set, or a localhost instance if not. - :param api_key: The API key to use to authenticate with the LangChain Hub API. + Args: + owner_repo_commit: The full name of the prompt to pull from in the format of + `owner/prompt_name:commit_hash` or `owner/prompt_name` + or just `prompt_name` if it's your own prompt. + include_model: Whether to include the model configuration in the pulled prompt. + api_url: The URL of the LangChain Hub API. Defaults to the hosted API service + if you have an API key set, or a localhost instance if not. + api_key: The API key to use to authenticate with the LangChain Hub API. + + Returns: + The pulled LangChain object. """ client = _get_client(api_key=api_key, api_url=api_url) diff --git a/libs/langchain/langchain_classic/indexes/_sql_record_manager.py b/libs/langchain/langchain_classic/indexes/_sql_record_manager.py index c5dbcfd8a67..e9ab8413ebb 100644 --- a/libs/langchain/langchain_classic/indexes/_sql_record_manager.py +++ b/libs/langchain/langchain_classic/indexes/_sql_record_manager.py @@ -97,21 +97,17 @@ class SQLRecordManager(RecordManager): """Initialize the SQLRecordManager. This class serves as a manager persistence layer that uses an SQL - backend to track upserted records. You should specify either a db_url + backend to track upserted records. You should specify either a `db_url` to create an engine or provide an existing engine. Args: namespace: The namespace associated with this record manager. engine: An already existing SQL Alchemy engine. - Default is None. - db_url: A database connection string used to create - an SQL Alchemy engine. Default is None. - engine_kwargs: Additional keyword arguments - to be passed when creating the engine. Default is an empty dictionary. - async_mode: Whether to create an async engine. - Driver should support async operations. - It only applies if db_url is provided. - Default is False. + db_url: A database connection string used to create an SQL Alchemy engine. + engine_kwargs: Additional keyword arguments to be passed when creating the + engine. + async_mode: Whether to create an async engine. Driver should support async + operations. It only applies if `db_url` is provided. Raises: ValueError: If both db_url and engine are provided or neither. 
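For readers skimming the `SQLRecordManager` constructor documented above, a short usage sketch may help; the `langchain_classic.indexes` import path and the SQLite URL are assumptions mirroring the old `langchain.indexes` re-export.

```python
# Sketch only: a local SQLite database backs the record manager; any SQLAlchemy
# URL (or a pre-built engine) could be passed instead, per the docstring above.
from langchain_classic.indexes import SQLRecordManager

record_manager = SQLRecordManager(
    "my_docs_namespace",                   # namespace tracked by this manager
    db_url="sqlite:///record_manager.db",  # engine is created from the URL
)
record_manager.create_schema()  # create the upsert-tracking table once
```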
diff --git a/libs/langchain/langchain_classic/indexes/vectorstore.py b/libs/langchain/langchain_classic/indexes/vectorstore.py index d23951d0daf..073091c2b2a 100644 --- a/libs/langchain/langchain_classic/indexes/vectorstore.py +++ b/libs/langchain/langchain_classic/indexes/vectorstore.py @@ -22,7 +22,7 @@ def _get_default_text_splitter() -> TextSplitter: class VectorStoreIndexWrapper(BaseModel): - """Wrapper around a vectorstore for easy access.""" + """Wrapper around a `VectorStore` for easy access.""" vectorstore: VectorStore @@ -38,11 +38,11 @@ class VectorStoreIndexWrapper(BaseModel): retriever_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> str: - """Query the vectorstore using the provided LLM. + """Query the `VectorStore` using the provided LLM. Args: question: The question or prompt to query. - llm: The language model to use. Must not be None. + llm: The language model to use. Must not be `None`. retriever_kwargs: Optional keyword arguments for the retriever. **kwargs: Additional keyword arguments forwarded to the chain. @@ -55,7 +55,7 @@ class VectorStoreIndexWrapper(BaseModel): "Please provide an llm to use for querying the vectorstore.\n" "For example,\n" "from langchain_openai import OpenAI\n" - "llm = OpenAI(temperature=0)" + "model = OpenAI(temperature=0)" ) raise NotImplementedError(msg) retriever_kwargs = retriever_kwargs or {} @@ -73,11 +73,11 @@ class VectorStoreIndexWrapper(BaseModel): retriever_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> str: - """Asynchronously query the vectorstore using the provided LLM. + """Asynchronously query the `VectorStore` using the provided LLM. Args: question: The question or prompt to query. - llm: The language model to use. Must not be None. + llm: The language model to use. Must not be `None`. retriever_kwargs: Optional keyword arguments for the retriever. **kwargs: Additional keyword arguments forwarded to the chain. @@ -90,7 +90,7 @@ class VectorStoreIndexWrapper(BaseModel): "Please provide an llm to use for querying the vectorstore.\n" "For example,\n" "from langchain_openai import OpenAI\n" - "llm = OpenAI(temperature=0)" + "model = OpenAI(temperature=0)" ) raise NotImplementedError(msg) retriever_kwargs = retriever_kwargs or {} @@ -108,16 +108,16 @@ class VectorStoreIndexWrapper(BaseModel): retriever_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> dict: - """Query the vectorstore and retrieve the answer along with sources. + """Query the `VectorStore` and retrieve the answer along with sources. Args: question: The question or prompt to query. - llm: The language model to use. Must not be None. + llm: The language model to use. Must not be `None`. retriever_kwargs: Optional keyword arguments for the retriever. **kwargs: Additional keyword arguments forwarded to the chain. Returns: - A dictionary containing the answer and source documents. + `dict` containing the answer and source documents. """ if llm is None: msg = ( @@ -125,7 +125,7 @@ class VectorStoreIndexWrapper(BaseModel): "Please provide an llm to use for querying the vectorstore.\n" "For example,\n" "from langchain_openai import OpenAI\n" - "llm = OpenAI(temperature=0)" + "model = OpenAI(temperature=0)" ) raise NotImplementedError(msg) retriever_kwargs = retriever_kwargs or {} @@ -143,16 +143,16 @@ class VectorStoreIndexWrapper(BaseModel): retriever_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> dict: - """Asynchronously query the vectorstore and retrieve the answer and sources. 
+ """Asynchronously query the `VectorStore` and retrieve the answer and sources. Args: question: The question or prompt to query. - llm: The language model to use. Must not be None. + llm: The language model to use. Must not be `None`. retriever_kwargs: Optional keyword arguments for the retriever. **kwargs: Additional keyword arguments forwarded to the chain. Returns: - A dictionary containing the answer and source documents. + `dict` containing the answer and source documents. """ if llm is None: msg = ( @@ -160,7 +160,7 @@ class VectorStoreIndexWrapper(BaseModel): "Please provide an llm to use for querying the vectorstore.\n" "For example,\n" "from langchain_openai import OpenAI\n" - "llm = OpenAI(temperature=0)" + "model = OpenAI(temperature=0)" ) raise NotImplementedError(msg) retriever_kwargs = retriever_kwargs or {} @@ -173,7 +173,7 @@ class VectorStoreIndexWrapper(BaseModel): def _get_in_memory_vectorstore() -> type[VectorStore]: - """Get the InMemoryVectorStore.""" + """Get the `InMemoryVectorStore`.""" import warnings try: @@ -184,7 +184,7 @@ def _get_in_memory_vectorstore() -> type[VectorStore]: warnings.warn( "Using InMemoryVectorStore as the default vectorstore." "This memory store won't persist data. You should explicitly" - "specify a vectorstore when using VectorstoreIndexCreator", + "specify a VectorStore when using VectorstoreIndexCreator", stacklevel=3, ) return InMemoryVectorStore @@ -206,7 +206,7 @@ class VectorstoreIndexCreator(BaseModel): ) def from_loaders(self, loaders: list[BaseLoader]) -> VectorStoreIndexWrapper: - """Create a vectorstore index from a list of loaders. + """Create a `VectorStore` index from a list of loaders. Args: loaders: A list of `BaseLoader` instances to load documents. @@ -220,7 +220,7 @@ class VectorstoreIndexCreator(BaseModel): return self.from_documents(docs) async def afrom_loaders(self, loaders: list[BaseLoader]) -> VectorStoreIndexWrapper: - """Asynchronously create a vectorstore index from a list of loaders. + """Asynchronously create a `VectorStore` index from a list of loaders. Args: loaders: A list of `BaseLoader` instances to load documents. @@ -234,7 +234,7 @@ class VectorstoreIndexCreator(BaseModel): return await self.afrom_documents(docs) def from_documents(self, documents: list[Document]) -> VectorStoreIndexWrapper: - """Create a vectorstore index from a list of documents. + """Create a `VectorStore` index from a list of documents. Args: documents: A list of `Document` objects. @@ -254,7 +254,7 @@ class VectorstoreIndexCreator(BaseModel): self, documents: list[Document], ) -> VectorStoreIndexWrapper: - """Asynchronously create a vectorstore index from a list of documents. + """Asynchronously create a `VectorStore` index from a list of documents. Args: documents: A list of `Document` objects. 
diff --git a/libs/langchain/langchain_classic/memory/buffer.py b/libs/langchain/langchain_classic/memory/buffer.py index c356b70da08..ab177fe3837 100644 --- a/libs/langchain/langchain_classic/memory/buffer.py +++ b/libs/langchain/langchain_classic/memory/buffer.py @@ -1,11 +1,11 @@ from typing import Any from langchain_core._api import deprecated -from langchain_core.memory import BaseMemory from langchain_core.messages import BaseMessage, get_buffer_string from langchain_core.utils import pre_init from typing_extensions import override +from langchain_classic.base_memory import BaseMemory from langchain_classic.memory.chat_memory import BaseChatMemory from langchain_classic.memory.utils import get_prompt_input_key @@ -30,7 +30,7 @@ class ConversationBufferMemory(BaseChatMemory): human_prefix: str = "Human" ai_prefix: str = "AI" - memory_key: str = "history" #: :meta private: + memory_key: str = "history" @property def buffer(self) -> Any: @@ -73,10 +73,7 @@ class ConversationBufferMemory(BaseChatMemory): @property def memory_variables(self) -> list[str]: - """Will always return list of memory variables. - - :meta private: - """ + """Will always return list of memory variables.""" return [self.memory_key] @override @@ -118,7 +115,7 @@ class ConversationStringBufferMemory(BaseMemory): buffer: str = "" output_key: str | None = None input_key: str | None = None - memory_key: str = "history" #: :meta private: + memory_key: str = "history" @pre_init def validate_chains(cls, values: dict) -> dict: @@ -130,10 +127,7 @@ class ConversationStringBufferMemory(BaseMemory): @property def memory_variables(self) -> list[str]: - """Will always return list of memory variables. - - :meta private: - """ + """Will always return list of memory variables.""" return [self.memory_key] @override diff --git a/libs/langchain/langchain_classic/memory/buffer_window.py b/libs/langchain/langchain_classic/memory/buffer_window.py index 264a836caa4..97e0d9cbb7c 100644 --- a/libs/langchain/langchain_classic/memory/buffer_window.py +++ b/libs/langchain/langchain_classic/memory/buffer_window.py @@ -24,7 +24,7 @@ class ConversationBufferWindowMemory(BaseChatMemory): human_prefix: str = "Human" ai_prefix: str = "AI" - memory_key: str = "history" #: :meta private: + memory_key: str = "history" k: int = 5 """Number of messages to store in buffer.""" @@ -50,10 +50,7 @@ class ConversationBufferWindowMemory(BaseChatMemory): @property def memory_variables(self) -> list[str]: - """Will always return list of memory variables. 
- - :meta private: - """ + """Will always return list of memory variables.""" return [self.memory_key] @override diff --git a/libs/langchain/langchain_classic/memory/chat_memory.py b/libs/langchain/langchain_classic/memory/chat_memory.py index 5a86a78024e..c775c6ba317 100644 --- a/libs/langchain/langchain_classic/memory/chat_memory.py +++ b/libs/langchain/langchain_classic/memory/chat_memory.py @@ -7,10 +7,10 @@ from langchain_core.chat_history import ( BaseChatMessageHistory, InMemoryChatMessageHistory, ) -from langchain_core.memory import BaseMemory from langchain_core.messages import AIMessage, HumanMessage from pydantic import Field +from langchain_classic.base_memory import BaseMemory from langchain_classic.memory.utils import get_prompt_input_key diff --git a/libs/langchain/langchain_classic/memory/combined.py b/libs/langchain/langchain_classic/memory/combined.py index b19c97edec1..3a5781ce01a 100644 --- a/libs/langchain/langchain_classic/memory/combined.py +++ b/libs/langchain/langchain_classic/memory/combined.py @@ -1,9 +1,9 @@ import warnings from typing import Any -from langchain_core.memory import BaseMemory from pydantic import field_validator +from langchain_classic.base_memory import BaseMemory from langchain_classic.memory.chat_memory import BaseChatMemory diff --git a/libs/langchain/langchain_classic/memory/entity.py b/libs/langchain/langchain_classic/memory/entity.py index 3b45140792d..be0679e4557 100644 --- a/libs/langchain/langchain_classic/memory/entity.py +++ b/libs/langchain/langchain_classic/memory/entity.py @@ -496,10 +496,7 @@ class ConversationEntityMemory(BaseChatMemory): @property def memory_variables(self) -> list[str]: - """Will always return list of memory variables. - - :meta private: - """ + """Will always return list of memory variables.""" return ["entities", self.chat_history_key] def load_memory_variables(self, inputs: dict[str, Any]) -> dict[str, Any]: diff --git a/libs/langchain/langchain_classic/memory/readonly.py b/libs/langchain/langchain_classic/memory/readonly.py index 42206123160..85ac1c626e1 100644 --- a/libs/langchain/langchain_classic/memory/readonly.py +++ b/libs/langchain/langchain_classic/memory/readonly.py @@ -1,6 +1,6 @@ from typing import Any -from langchain_core.memory import BaseMemory +from langchain_classic.base_memory import BaseMemory class ReadOnlySharedMemory(BaseMemory): diff --git a/libs/langchain/langchain_classic/memory/simple.py b/libs/langchain/langchain_classic/memory/simple.py index 61fb2b27301..c9163c396f3 100644 --- a/libs/langchain/langchain_classic/memory/simple.py +++ b/libs/langchain/langchain_classic/memory/simple.py @@ -1,8 +1,9 @@ from typing import Any -from langchain_core.memory import BaseMemory from typing_extensions import override +from langchain_classic.base_memory import BaseMemory + class SimpleMemory(BaseMemory): """Simple Memory. diff --git a/libs/langchain/langchain_classic/memory/summary.py b/libs/langchain/langchain_classic/memory/summary.py index 5b2ed54e56a..bd3d6e86895 100644 --- a/libs/langchain/langchain_classic/memory/summary.py +++ b/libs/langchain/langchain_classic/memory/summary.py @@ -97,7 +97,7 @@ class ConversationSummaryMemory(BaseChatMemory, SummarizerMixin): """ buffer: str = "" - memory_key: str = "history" #: :meta private: + memory_key: str = "history" @classmethod def from_messages( @@ -129,10 +129,7 @@ class ConversationSummaryMemory(BaseChatMemory, SummarizerMixin): @property def memory_variables(self) -> list[str]: - """Will always return list of memory variables. 
- - :meta private: - """ + """Will always return list of memory variables.""" return [self.memory_key] @override diff --git a/libs/langchain/langchain_classic/memory/summary_buffer.py b/libs/langchain/langchain_classic/memory/summary_buffer.py index fffcceb27bb..40c8ba168cf 100644 --- a/libs/langchain/langchain_classic/memory/summary_buffer.py +++ b/libs/langchain/langchain_classic/memory/summary_buffer.py @@ -41,10 +41,7 @@ class ConversationSummaryBufferMemory(BaseChatMemory, SummarizerMixin): @property def memory_variables(self) -> list[str]: - """Will always return list of memory variables. - - :meta private: - """ + """Will always return list of memory variables.""" return [self.memory_key] @override diff --git a/libs/langchain/langchain_classic/memory/token_buffer.py b/libs/langchain/langchain_classic/memory/token_buffer.py index caa0f78bcef..665da3c23dd 100644 --- a/libs/langchain/langchain_classic/memory/token_buffer.py +++ b/libs/langchain/langchain_classic/memory/token_buffer.py @@ -50,10 +50,7 @@ class ConversationTokenBufferMemory(BaseChatMemory): @property def memory_variables(self) -> list[str]: - """Will always return list of memory variables. - - :meta private: - """ + """Will always return list of memory variables.""" return [self.memory_key] @override diff --git a/libs/langchain/langchain_classic/memory/vectorstore.py b/libs/langchain/langchain_classic/memory/vectorstore.py index ce09cb3a3aa..fc6e07fe929 100644 --- a/libs/langchain/langchain_classic/memory/vectorstore.py +++ b/libs/langchain/langchain_classic/memory/vectorstore.py @@ -5,10 +5,10 @@ from typing import Any from langchain_core._api import deprecated from langchain_core.documents import Document -from langchain_core.memory import BaseMemory from langchain_core.vectorstores import VectorStoreRetriever from pydantic import Field +from langchain_classic.base_memory import BaseMemory from langchain_classic.memory.utils import get_prompt_input_key @@ -30,7 +30,7 @@ class VectorStoreRetrieverMemory(BaseMemory): retriever: VectorStoreRetriever = Field(exclude=True) """VectorStoreRetriever object to connect to.""" - memory_key: str = "history" #: :meta private: + memory_key: str = "history" """Key name to locate the memories in the result of load_memory_variables.""" input_key: str | None = None diff --git a/libs/langchain/langchain_classic/memory/vectorstore_token_buffer_memory.py b/libs/langchain/langchain_classic/memory/vectorstore_token_buffer_memory.py index ede7673f9a5..dcf97ac4fe2 100644 --- a/libs/langchain/langchain_classic/memory/vectorstore_token_buffer_memory.py +++ b/libs/langchain/langchain_classic/memory/vectorstore_token_buffer_memory.py @@ -2,8 +2,8 @@ This implements a conversation memory in which the messages are stored in a memory buffer up to a specified token limit. When the limit is exceeded, older messages are -saved to a vectorstore backing database. The vectorstore can be made persistent across -sessions. +saved to a `VectorStore` backing database. The `VectorStore` can be made persistent +across sessions. 
""" import warnings @@ -53,13 +53,13 @@ class ConversationVectorStoreTokenBufferMemory(ConversationTokenBufferMemory): accepts the following additional arguments retriever: (required) A VectorStoreRetriever object to use - as the vector backing store + as the vector backing store split_chunk_size: (optional, 1000) Token chunk split size - for long messages generated by the AI + for long messages generated by the AI previous_history_template: (optional) Template used to format - the contents of the prompt history + the contents of the prompt history Example using ChromaDB: @@ -157,7 +157,7 @@ class ConversationVectorStoreTokenBufferMemory(ConversationTokenBufferMemory): def save_remainder(self) -> None: """Save the remainder of the conversation buffer to the vector store. - This is useful if you have made the vectorstore persistent, in which + Useful if you have made the VectorStore persistent, in which case this can be called before the end of the session to store the remainder of the conversation. """ diff --git a/libs/langchain/langchain_classic/model_laboratory.py b/libs/langchain/langchain_classic/model_laboratory.py index 81f903a51be..31700b5bafc 100644 --- a/libs/langchain/langchain_classic/model_laboratory.py +++ b/libs/langchain/langchain_classic/model_laboratory.py @@ -21,8 +21,8 @@ class ModelLaboratory: Args: chains: A sequence of chains to experiment with. Each chain must have exactly one input and one output variable. - names (list[str] | None): Optional list of names corresponding to each - chain. If provided, its length must match the number of chains. + names: Optional list of names corresponding to each chain. + If provided, its length must match the number of chains. Raises: @@ -72,7 +72,7 @@ class ModelLaboratory: If provided, the prompt must contain exactly one input variable. Returns: - ModelLaboratory: An instance of `ModelLaboratory` initialized with LLMs. + An instance of `ModelLaboratory` initialized with LLMs. """ if prompt is None: prompt = PromptTemplate(input_variables=["_input"], template="{_input}") diff --git a/libs/langchain/langchain_classic/output_parsers/regex_dict.py b/libs/langchain/langchain_classic/output_parsers/regex_dict.py index 5bcafb37156..f0052958c33 100644 --- a/libs/langchain/langchain_classic/output_parsers/regex_dict.py +++ b/libs/langchain/langchain_classic/output_parsers/regex_dict.py @@ -8,7 +8,7 @@ from langchain_core.output_parsers import BaseOutputParser class RegexDictParser(BaseOutputParser[dict[str, str]]): """Parse the output of an LLM call into a Dictionary using a regex.""" - regex_pattern: str = r"{}:\s?([^.'\n']*)\.?" # : :meta private: + regex_pattern: str = r"{}:\s?([^.'\n']*)\.?" """The regex pattern to use to parse the output.""" output_key_to_format: dict[str, str] """The keys to use for the output.""" diff --git a/libs/langchain/langchain_classic/output_parsers/structured.py b/libs/langchain/langchain_classic/output_parsers/structured.py index b5efae9060c..346b0ff9f19 100644 --- a/libs/langchain/langchain_classic/output_parsers/structured.py +++ b/libs/langchain/langchain_classic/output_parsers/structured.py @@ -96,8 +96,8 @@ class StructuredOutputParser(BaseOutputParser[dict[str, Any]]): # ``` Args: - only_json (bool): If `True`, only the json in the Markdown code snippet - will be returned, without the introducing text. Defaults to `False`. + only_json: If `True`, only the json in the Markdown code snippet + will be returned, without the introducing text. 
""" schema_str = "\n".join( [_get_sub_string(schema) for schema in self.response_schemas], diff --git a/libs/langchain/langchain_classic/output_parsers/yaml.py b/libs/langchain/langchain_classic/output_parsers/yaml.py index d6c7bfebae9..b286cf5c875 100644 --- a/libs/langchain/langchain_classic/output_parsers/yaml.py +++ b/libs/langchain/langchain_classic/output_parsers/yaml.py @@ -16,10 +16,10 @@ T = TypeVar("T", bound=BaseModel) class YamlOutputParser(BaseOutputParser[T]): - """Parse YAML output using a pydantic model.""" + """Parse YAML output using a Pydantic model.""" pydantic_object: type[T] - """The pydantic model to parse.""" + """The Pydantic model to parse.""" pattern: re.Pattern = re.compile( r"^```(?:ya?ml)?(?P[^`]*)", re.MULTILINE | re.DOTALL, diff --git a/libs/langchain/langchain_classic/pydantic_v1/__init__.py b/libs/langchain/langchain_classic/pydantic_v1/__init__.py deleted file mode 100644 index 8fceb791cc2..00000000000 --- a/libs/langchain/langchain_classic/pydantic_v1/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -from importlib import metadata - -from langchain_core._api import warn_deprecated - -## Create namespaces for pydantic v1 and v2. -# This code must stay at the top of the file before other modules may -# attempt to import pydantic since it adds pydantic_v1 and pydantic_v2 to sys.modules. -# -# This hack is done for the following reasons: -# * LangChain will attempt to remain compatible with both pydantic v1 and v2 since -# both dependencies and dependents may be stuck on either version of v1 or v2. -# * Creating namespaces for pydantic v1 and v2 should allow us to write code that -# unambiguously uses either v1 or v2 API. -# * This change is easier to roll out and roll back. -from pydantic.v1 import * # noqa: F403 - -try: - _PYDANTIC_MAJOR_VERSION: int = int(metadata.version("pydantic").split(".")[0]) -except metadata.PackageNotFoundError: - _PYDANTIC_MAJOR_VERSION = 0 - -warn_deprecated( - "0.3.0", - removal="1.0.0", - alternative="pydantic.v1 or pydantic", - message=( - "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. " - "The langchain.pydantic_v1 module was a " - "compatibility shim for pydantic v1, and should no longer be used. " - "Please update the code to import from Pydantic directly.\n\n" - "For example, replace imports like: " - "`from langchain_classic.pydantic_v1 import BaseModel`\n" - "with: `from pydantic import BaseModel`\n" - "or the v1 compatibility namespace if you are working in a code base " - "that has not been fully upgraded to pydantic 2 yet. " - "\tfrom pydantic.v1 import BaseModel\n" - ), -) diff --git a/libs/langchain/langchain_classic/pydantic_v1/dataclasses.py b/libs/langchain/langchain_classic/pydantic_v1/dataclasses.py deleted file mode 100644 index 78788016bb7..00000000000 --- a/libs/langchain/langchain_classic/pydantic_v1/dataclasses.py +++ /dev/null @@ -1,20 +0,0 @@ -from langchain_core._api import warn_deprecated -from pydantic.v1.dataclasses import * # noqa: F403 - -warn_deprecated( - "0.3.0", - removal="1.0.0", - alternative="pydantic.v1 or pydantic", - message=( - "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. " - "The langchain.pydantic_v1 module was a " - "compatibility shim for pydantic v1, and should no longer be used. 
" - "Please update the code to import from Pydantic directly.\n\n" - "For example, replace imports like: " - "`from langchain_classic.pydantic_v1 import BaseModel`\n" - "with: `from pydantic import BaseModel`\n" - "or the v1 compatibility namespace if you are working in a code base " - "that has not been fully upgraded to pydantic 2 yet. " - "\tfrom pydantic.v1 import BaseModel\n" - ), -) diff --git a/libs/langchain/langchain_classic/pydantic_v1/main.py b/libs/langchain/langchain_classic/pydantic_v1/main.py deleted file mode 100644 index 1895c08fb69..00000000000 --- a/libs/langchain/langchain_classic/pydantic_v1/main.py +++ /dev/null @@ -1,20 +0,0 @@ -from langchain_core._api import warn_deprecated -from pydantic.v1.main import * # noqa: F403 - -warn_deprecated( - "0.3.0", - removal="1.0.0", - alternative="pydantic.v1 or pydantic", - message=( - "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. " - "The langchain.pydantic_v1 module was a " - "compatibility shim for pydantic v1, and should no longer be used. " - "Please update the code to import from Pydantic directly.\n\n" - "For example, replace imports like: " - "`from langchain_classic.pydantic_v1 import BaseModel`\n" - "with: `from pydantic import BaseModel`\n" - "or the v1 compatibility namespace if you are working in a code base " - "that has not been fully upgraded to pydantic 2 yet. " - "\tfrom pydantic.v1 import BaseModel\n" - ), -) diff --git a/libs/langchain/langchain_classic/retrievers/document_compressors/cohere_rerank.py b/libs/langchain/langchain_classic/retrievers/document_compressors/cohere_rerank.py index 8930f052c15..9ceb69d8b6f 100644 --- a/libs/langchain/langchain_classic/retrievers/document_compressors/cohere_rerank.py +++ b/libs/langchain/langchain_classic/retrievers/document_compressors/cohere_rerank.py @@ -75,7 +75,6 @@ class CohereRerank(BaseDocumentCompressor): documents: A sequence of documents to rerank. model: The model to use for re-ranking. Default to self.model. top_n : The number of results to return. If `None` returns all results. - Defaults to self.top_n. max_chunks_per_doc : The maximum number of chunks derived from a document. """ # noqa: E501 if len(documents) == 0: # to avoid empty api call diff --git a/libs/langchain/langchain_classic/retrievers/document_compressors/embeddings_filter.py b/libs/langchain/langchain_classic/retrievers/document_compressors/embeddings_filter.py index 3bb99c00946..885596ddae3 100644 --- a/libs/langchain/langchain_classic/retrievers/document_compressors/embeddings_filter.py +++ b/libs/langchain/langchain_classic/retrievers/document_compressors/embeddings_filter.py @@ -33,8 +33,8 @@ class EmbeddingsFilter(BaseDocumentCompressor): two matrices (List[List[float]]) and return a matrix of scores where higher values indicate greater similarity.""" k: int | None = 20 - """The number of relevant documents to return. Can be set to None, in which case - `similarity_threshold` must be specified. Defaults to 20.""" + """The number of relevant documents to return. Can be set to `None`, in which case + `similarity_threshold` must be specified.""" similarity_threshold: float | None = None """Threshold for determining when two documents are similar enough to be considered redundant. 
Defaults to `None`, must be specified if `k` is set diff --git a/libs/langchain/langchain_classic/retrievers/ensemble.py b/libs/langchain/langchain_classic/retrievers/ensemble.py index bdce26c6e2e..2e0ea312f15 100644 --- a/libs/langchain/langchain_classic/retrievers/ensemble.py +++ b/libs/langchain/langchain_classic/retrievers/ensemble.py @@ -61,7 +61,6 @@ class EnsembleRetriever(BaseRetriever): weighting for all retrievers. c: A constant added to the rank, controlling the balance between the importance of high-ranked items and the consideration given to lower-ranked items. - Default is 60. id_key: The key in the document's metadata used to determine unique documents. If not specified, page_content is used. """ @@ -299,8 +298,8 @@ class EnsembleRetriever(BaseRetriever): doc_lists: A list of rank lists, where each rank list contains unique items. Returns: - list: The final aggregated list of items sorted by their weighted RRF - scores in descending order. + The final aggregated list of items sorted by their weighted RRF + scores in descending order. """ if len(doc_lists) != len(self.weights): msg = "Number of rank lists must be equal to the number of weights." diff --git a/libs/langchain/langchain_classic/retrievers/multi_vector.py b/libs/langchain/langchain_classic/retrievers/multi_vector.py index d603424f7a3..1a569b7e13b 100644 --- a/libs/langchain/langchain_classic/retrievers/multi_vector.py +++ b/libs/langchain/langchain_classic/retrievers/multi_vector.py @@ -30,7 +30,7 @@ class MultiVectorRetriever(BaseRetriever): """Retrieve from a set of multiple embeddings for the same document.""" vectorstore: VectorStore - """The underlying vectorstore to use to store small chunks + """The underlying `VectorStore` to use to store small chunks and their embedding vectors""" byte_store: ByteStore | None = None """The lower-level backing storage layer for the parent documents""" @@ -86,7 +86,7 @@ class MultiVectorRetriever(BaseRetriever): else: sub_docs = self.vectorstore.similarity_search(query, **self.search_kwargs) - # We do this to maintain the order of the ids that are returned + # We do this to maintain the order of the IDs that are returned ids = [] for d in sub_docs: if self.id_key in d.metadata and d.metadata[self.id_key] not in ids: @@ -128,7 +128,7 @@ class MultiVectorRetriever(BaseRetriever): **self.search_kwargs, ) - # We do this to maintain the order of the ids that are returned + # We do this to maintain the order of the IDs that are returned ids = [] for d in sub_docs: if self.id_key in d.metadata and d.metadata[self.id_key] not in ids: diff --git a/libs/langchain/langchain_classic/retrievers/parent_document_retriever.py b/libs/langchain/langchain_classic/retrievers/parent_document_retriever.py index 9d4e47d2bae..635f320f21f 100644 --- a/libs/langchain/langchain_classic/retrievers/parent_document_retriever.py +++ b/libs/langchain/langchain_classic/retrievers/parent_document_retriever.py @@ -21,7 +21,7 @@ class ParentDocumentRetriever(MultiVectorRetriever): The ParentDocumentRetriever strikes that balance by splitting and storing small chunks of data. During retrieval, it first fetches the small chunks - but then looks up the parent ids for those chunks and returns those larger + but then looks up the parent IDs for those chunks and returns those larger documents. 
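The `weighted_reciprocal_rank` docstring a few hunks above describes Reciprocal Rank Fusion; a standalone sketch of that scoring rule (not the retriever's actual implementation, which operates on `Document` objects) looks roughly like this, with `c=60` as the conventional constant mentioned in the `EnsembleRetriever` docstring.

```python
# Sketch only: a plain-Python illustration of weighted RRF over ranked ID lists.
from collections import defaultdict


def weighted_rrf(doc_lists: list[list[str]], weights: list[float], c: int = 60) -> list[str]:
    """Fuse several rankings: score(d) = sum_i weights[i] / (rank_i(d) + c)."""
    scores: dict[str, float] = defaultdict(float)
    for ranked, weight in zip(doc_lists, weights):
        for rank, doc_id in enumerate(ranked, start=1):
            scores[doc_id] += weight / (rank + c)
    return sorted(scores, key=scores.get, reverse=True)  # highest fused score first


print(weighted_rrf([["a", "b", "c"], ["b", "a"]], weights=[0.3, 0.7]))  # ['b', 'a', 'c']
```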
Note that "parent document" refers to the document that a small chunk @@ -44,7 +44,7 @@ class ParentDocumentRetriever(MultiVectorRetriever): child_splitter = RecursiveCharacterTextSplitter( chunk_size=400, add_start_index=True ) - # The vectorstore to use to index the child chunks + # The VectorStore to use to index the child chunks vectorstore = Chroma(embedding_function=OpenAIEmbeddings()) # The storage layer for the parent documents store = InMemoryStore() @@ -85,7 +85,7 @@ class ParentDocumentRetriever(MultiVectorRetriever): if ids is None: doc_ids = [str(uuid.uuid4()) for _ in documents] if not add_to_docstore: - msg = "If ids are not passed in, `add_to_docstore` MUST be True" + msg = "If IDs are not passed in, `add_to_docstore` MUST be True" raise ValueError(msg) else: if len(documents) != len(ids): @@ -124,16 +124,16 @@ class ParentDocumentRetriever(MultiVectorRetriever): Args: documents: List of documents to add - ids: Optional list of ids for documents. If provided should be the same + ids: Optional list of IDs for documents. If provided should be the same length as the list of documents. Can be provided if parent documents are already in the document store and you don't want to re-add to the docstore. If not provided, random UUIDs will be used as - ids. + IDs. add_to_docstore: Boolean of whether to add documents to docstore. This can be false if and only if `ids` are provided. You may want to set this to False if the documents are already in the docstore and you don't want to re-add them. - **kwargs: additional keyword arguments passed to the vectorstore. + **kwargs: additional keyword arguments passed to the `VectorStore`. """ docs, full_docs = self._split_docs_for_adding( documents, @@ -155,16 +155,16 @@ class ParentDocumentRetriever(MultiVectorRetriever): Args: documents: List of documents to add - ids: Optional list of ids for documents. If provided should be the same + ids: Optional list of IDs for documents. If provided should be the same length as the list of documents. Can be provided if parent documents are already in the document store and you don't want to re-add to the docstore. If not provided, random UUIDs will be used as - ids. + idIDss. add_to_docstore: Boolean of whether to add documents to docstore. This can be false if and only if `ids` are provided. You may want to set this to False if the documents are already in the docstore and you don't want to re-add them. - **kwargs: additional keyword arguments passed to the vectorstore. + **kwargs: additional keyword arguments passed to the `VectorStore`. """ docs, full_docs = self._split_docs_for_adding( documents, diff --git a/libs/langchain/langchain_classic/retrievers/self_query/base.py b/libs/langchain/langchain_classic/retrievers/self_query/base.py index 624143dd188..152a7de6949 100644 --- a/libs/langchain/langchain_classic/retrievers/self_query/base.py +++ b/libs/langchain/langchain_classic/retrievers/self_query/base.py @@ -251,7 +251,7 @@ class SelfQueryRetriever(BaseRetriever): search_kwargs: dict = Field(default_factory=dict) """Keyword arguments to pass in to the vector store search.""" structured_query_translator: Visitor - """Translator for turning internal query language into vectorstore search params.""" + """Translator for turning internal query language into `VectorStore` search params.""" # noqa: E501 verbose: bool = False use_original_query: bool = False @@ -360,7 +360,7 @@ class SelfQueryRetriever(BaseRetriever): queried. metadata_field_info: Metadata field information for the documents. 
structured_query_translator: Optional translator for turning internal query - language into vectorstore search params. + language into `VectorStore` search params. chain_kwargs: Additional keyword arguments for the query constructor. enable_limit: Whether to enable the limit operator. use_original_query: Whether to use the original query instead of the revised diff --git a/libs/langchain/langchain_classic/retrievers/time_weighted_retriever.py b/libs/langchain/langchain_classic/retrievers/time_weighted_retriever.py index 9823318baa8..7cba23e4bd3 100644 --- a/libs/langchain/langchain_classic/retrievers/time_weighted_retriever.py +++ b/libs/langchain/langchain_classic/retrievers/time_weighted_retriever.py @@ -25,17 +25,17 @@ class TimeWeightedVectorStoreRetriever(BaseRetriever): """ vectorstore: VectorStore - """The vectorstore to store documents and determine salience.""" + """The `VectorStore` to store documents and determine salience.""" search_kwargs: dict = Field(default_factory=lambda: {"k": 100}) - """Keyword arguments to pass to the vectorstore similarity search.""" + """Keyword arguments to pass to the `VectorStore` similarity search.""" # TODO: abstract as a queue memory_stream: list[Document] = Field(default_factory=list) """The memory_stream of documents to search through.""" decay_rate: float = Field(default=0.01) - """The exponential decay factor used as (1.0-decay_rate)**(hrs_passed).""" + """The exponential decay factor used as `(1.0-decay_rate)**(hrs_passed)`.""" k: int = 4 """The maximum number of documents to retrieve in a given call.""" diff --git a/libs/langchain/langchain_classic/runnables/hub.py b/libs/langchain/langchain_classic/runnables/hub.py index 9d0bdbf56c0..4c6a358fce9 100644 --- a/libs/langchain/langchain_classic/runnables/hub.py +++ b/libs/langchain/langchain_classic/runnables/hub.py @@ -17,7 +17,7 @@ class HubRunnable(RunnableBindingBase[Input, Output]): # type: ignore[no-redef] api_key: str | None = None, **kwargs: Any, ) -> None: - """Initialize the HubRunnable. + """Initialize the `HubRunnable`. Args: owner_repo_commit: The full name of the prompt to pull from in the format of diff --git a/libs/langchain/langchain_classic/runnables/openai_functions.py b/libs/langchain/langchain_classic/runnables/openai_functions.py index 38434559a98..29f2194c855 100644 --- a/libs/langchain/langchain_classic/runnables/openai_functions.py +++ b/libs/langchain/langchain_classic/runnables/openai_functions.py @@ -10,7 +10,7 @@ from typing_extensions import TypedDict class OpenAIFunction(TypedDict): - """A function description for ChatOpenAI.""" + """A function description for `ChatOpenAI`.""" name: str """The name of the function.""" @@ -33,7 +33,7 @@ class OpenAIFunctionsRouter(RunnableBindingBase[BaseMessage, Any]): # type: ign ], functions: list[OpenAIFunction] | None = None, ): - """Initialize the OpenAIFunctionsRouter. + """Initialize the `OpenAIFunctionsRouter`. Args: runnables: A mapping of function names to runnables. 
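The `decay_rate` field above is documented as the factor `(1.0 - decay_rate) ** hours_passed`; a tiny worked sketch of that recency factor follows. The note about adding it to the semantic similarity score reflects how `TimeWeightedVectorStoreRetriever` conventionally combines the two and is background, not a restatement of this diff.

```python
# Sketch only: computes the recency factor documented above by hand.
from datetime import datetime, timedelta


def recency_factor(last_accessed: datetime, decay_rate: float = 0.01) -> float:
    hours_passed = (datetime.now() - last_accessed).total_seconds() / 3600
    return (1.0 - decay_rate) ** hours_passed


# A document untouched for a day decays noticeably; one touched an hour ago barely does.
print(recency_factor(datetime.now() - timedelta(days=1)))   # roughly 0.79 with decay_rate=0.01
print(recency_factor(datetime.now() - timedelta(hours=1)))  # roughly 0.99
```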
diff --git a/libs/langchain/langchain_classic/schema/__init__.py b/libs/langchain/langchain_classic/schema/__init__.py index 6c8c4f61a0a..c957fe8b9a5 100644 --- a/libs/langchain/langchain_classic/schema/__init__.py +++ b/libs/langchain/langchain_classic/schema/__init__.py @@ -5,7 +5,6 @@ from langchain_core.caches import BaseCache from langchain_core.chat_history import BaseChatMessageHistory from langchain_core.documents import BaseDocumentTransformer, Document from langchain_core.exceptions import LangChainException, OutputParserException -from langchain_core.memory import BaseMemory from langchain_core.messages import ( AIMessage, BaseMessage, @@ -36,6 +35,8 @@ from langchain_core.prompts import BasePromptTemplate, format_document from langchain_core.retrievers import BaseRetriever from langchain_core.stores import BaseStore +from langchain_classic.base_memory import BaseMemory + RUN_KEY = "__run" # Backwards compatibility. diff --git a/libs/langchain/langchain_classic/schema/callbacks/tracers/schemas.py b/libs/langchain/langchain_classic/schema/callbacks/tracers/schemas.py index e8f34027d34..32e6b2e4f13 100644 --- a/libs/langchain/langchain_classic/schema/callbacks/tracers/schemas.py +++ b/libs/langchain/langchain_classic/schema/callbacks/tracers/schemas.py @@ -1,27 +1,5 @@ -from langchain_core.tracers.schemas import ( - BaseRun, - ChainRun, - LLMRun, - Run, - RunTypeEnum, - ToolRun, - TracerSession, - TracerSessionBase, - TracerSessionV1, - TracerSessionV1Base, - TracerSessionV1Create, -) +from langchain_core.tracers.schemas import Run __all__ = [ - "BaseRun", - "ChainRun", - "LLMRun", "Run", - "RunTypeEnum", - "ToolRun", - "TracerSession", - "TracerSessionBase", - "TracerSessionV1", - "TracerSessionV1Base", - "TracerSessionV1Create", ] diff --git a/libs/langchain/langchain_classic/schema/memory.py b/libs/langchain/langchain_classic/schema/memory.py index d2f3d73e613..238d3283936 100644 --- a/libs/langchain/langchain_classic/schema/memory.py +++ b/libs/langchain/langchain_classic/schema/memory.py @@ -1,3 +1,3 @@ -from langchain_core.memory import BaseMemory +from langchain_classic.base_memory import BaseMemory __all__ = ["BaseMemory"] diff --git a/libs/langchain/langchain_classic/smith/__init__.py b/libs/langchain/langchain_classic/smith/__init__.py index 4f06ae3af34..a744151182c 100644 --- a/libs/langchain/langchain_classic/smith/__init__.py +++ b/libs/langchain/langchain_classic/smith/__init__.py @@ -1,9 +1,7 @@ """**LangSmith** utilities. This module provides utilities for connecting to -[LangSmith](https://smith.langchain.com/). -For more information on LangSmith, -see the [LangSmith documentation](https://docs.smith.langchain.com/). +[LangSmith](https://docs.langchain.com/langsmith/home). **Evaluation** @@ -22,8 +20,8 @@ from langchain_classic.smith import RunEvalConfig, run_on_dataset # Chains may have memory. Passing in a constructor function lets the # evaluation framework avoid cross-contamination between runs. 
def construct_chain(): - llm = ChatOpenAI(temperature=0) - chain = LLMChain.from_string(llm, "What's the answer to {your_input_key}") + model = ChatOpenAI(temperature=0) + chain = LLMChain.from_string(model, "What's the answer to {your_input_key}") return chain diff --git a/libs/langchain/langchain_classic/smith/evaluation/__init__.py b/libs/langchain/langchain_classic/smith/evaluation/__init__.py index 78e7fc70ab5..755b3c14d17 100644 --- a/libs/langchain/langchain_classic/smith/evaluation/__init__.py +++ b/libs/langchain/langchain_classic/smith/evaluation/__init__.py @@ -4,7 +4,7 @@ This module provides utilities for evaluating Chains and other language model applications using LangChain evaluators and LangSmith. For more information on the LangSmith API, see the -[LangSmith API documentation](https://docs.smith.langchain.com/docs/). +[LangSmith API documentation](https://docs.langchain.com/langsmith/home). **Example** @@ -16,8 +16,8 @@ from langchain_classic.smith import EvaluatorType, RunEvalConfig, run_on_dataset def construct_chain(): - llm = ChatOpenAI(temperature=0) - chain = LLMChain.from_string(llm, "What's the answer to {your_input_key}") + model = ChatOpenAI(temperature=0) + chain = LLMChain.from_string(model, "What's the answer to {your_input_key}") return chain diff --git a/libs/langchain/langchain_classic/smith/evaluation/config.py b/libs/langchain/langchain_classic/smith/evaluation/config.py index 6f3063ce6f2..6d1df6232b3 100644 --- a/libs/langchain/langchain_classic/smith/evaluation/config.py +++ b/libs/langchain/langchain_classic/smith/evaluation/config.py @@ -34,28 +34,17 @@ BATCH_EVALUATOR_LIKE = Callable[ class EvalConfig(BaseModel): """Configuration for a given run evaluator. - Parameters - ---------- - evaluator_type : EvaluatorType - The type of evaluator to use. - - Methods: - ------- - get_kwargs() - Get the keyword arguments for the evaluator configuration. - + Attributes: + evaluator_type: The type of evaluator to use. """ evaluator_type: EvaluatorType def get_kwargs(self) -> dict[str, Any]: - """Get the keyword arguments for the load_evaluator call. + """Get the keyword arguments for the `load_evaluator` call. Returns: - ------- - Dict[str, Any] - The keyword arguments for the load_evaluator call. - + The keyword arguments for the `load_evaluator` call. """ kwargs = {} for field, val in self: @@ -110,7 +99,7 @@ class RunEvalConfig(BaseModel): batch_evaluators: list[BATCH_EVALUATOR_LIKE] | None = None """Evaluators that run on an aggregate/batch level. - These generate 1 or more metrics that are assigned to the full test run. + These generate one or more metrics that are assigned to the full test run. As a result, they are not associated with individual traces. """ @@ -134,13 +123,9 @@ class RunEvalConfig(BaseModel): class Criteria(SingleKeyEvalConfig): """Configuration for a reference-free criteria evaluator. - Parameters - ---------- - criteria : CRITERIA_TYPE | None - The criteria to evaluate. - llm : BaseLanguageModel | None - The language model to use for the evaluation chain. - + Attributes: + criteria: The criteria to evaluate. + llm: The language model to use for the evaluation chain. """ criteria: CRITERIA_TYPE | None = None @@ -150,12 +135,9 @@ class RunEvalConfig(BaseModel): class LabeledCriteria(SingleKeyEvalConfig): """Configuration for a labeled (with references) criteria evaluator. - Parameters - ---------- - criteria : CRITERIA_TYPE | None - The criteria to evaluate. 
- llm : BaseLanguageModel | None - The language model to use for the evaluation chain. + Attributes: + criteria: The criteria to evaluate. + llm: The language model to use for the evaluation chain. """ criteria: CRITERIA_TYPE | None = None @@ -165,14 +147,9 @@ class RunEvalConfig(BaseModel): class EmbeddingDistance(SingleKeyEvalConfig): """Configuration for an embedding distance evaluator. - Parameters - ---------- - embeddings : Optional[Embeddings] - The embeddings to use for computing the distance. - - distance_metric : Optional[EmbeddingDistanceEnum] - The distance metric to use for computing the distance. - + Attributes: + embeddings: The embeddings to use for computing the distance. + distance_metric: The distance metric to use for computing the distance. """ evaluator_type: EvaluatorType = EvaluatorType.EMBEDDING_DISTANCE @@ -186,34 +163,23 @@ class RunEvalConfig(BaseModel): class StringDistance(SingleKeyEvalConfig): """Configuration for a string distance evaluator. - Parameters - ---------- - distance : Optional[StringDistanceEnum] - The string distance metric to use. - + Attributes: + distance: The string distance metric to use (`damerau_levenshtein`, + `levenshtein`, `jaro`, or `jaro_winkler`). + normalize_score: Whether to normalize the distance to between 0 and 1. + Applies only to the Levenshtein and Damerau-Levenshtein distances. """ evaluator_type: EvaluatorType = EvaluatorType.STRING_DISTANCE distance: StringDistanceEnum | None = None - """The string distance metric to use. - damerau_levenshtein: The Damerau-Levenshtein distance. - levenshtein: The Levenshtein distance. - jaro: The Jaro distance. - jaro_winkler: The Jaro-Winkler distance. - """ normalize_score: bool = True - """Whether to normalize the distance to between 0 and 1. - Applies only to the Levenshtein and Damerau-Levenshtein distances.""" class QA(SingleKeyEvalConfig): """Configuration for a QA evaluator. - Parameters - ---------- - prompt : Optional[BasePromptTemplate] - The prompt template to use for generating the question. - llm : BaseLanguageModel | None - The language model to use for the evaluation chain. + Attributes: + prompt: The prompt template to use for generating the question. + llm: The language model to use for the evaluation chain. """ evaluator_type: EvaluatorType = EvaluatorType.QA @@ -223,13 +189,9 @@ class RunEvalConfig(BaseModel): class ContextQA(SingleKeyEvalConfig): """Configuration for a context-based QA evaluator. - Parameters - ---------- - prompt : Optional[BasePromptTemplate] - The prompt template to use for generating the question. - llm : BaseLanguageModel | None - The language model to use for the evaluation chain. - + Attributes: + prompt: The prompt template to use for generating the question. + llm: The language model to use for the evaluation chain. """ evaluator_type: EvaluatorType = EvaluatorType.CONTEXT_QA @@ -239,13 +201,9 @@ class RunEvalConfig(BaseModel): class CoTQA(SingleKeyEvalConfig): """Configuration for a context-based QA evaluator. - Parameters - ---------- - prompt : Optional[BasePromptTemplate] - The prompt template to use for generating the question. - llm : BaseLanguageModel | None - The language model to use for the evaluation chain. - + Attributes: + prompt: The prompt template to use for generating the question. + llm: The language model to use for the evaluation chain. 
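The nested evaluator configs being reworked here (`Criteria`, `EmbeddingDistance`, `StringDistance`, `QA`, `ExactMatch`, and so on) are typically bundled into a `RunEvalConfig` before being handed to `run_on_dataset`. A minimal sketch, assuming the constructor still accepts a mix of evaluator names and config objects; the chosen evaluators are illustrative:

```python
# Sketch only: the import mirrors the module docstring example earlier in this diff.
from langchain_classic.smith import RunEvalConfig

evaluation_config = RunEvalConfig(
    evaluators=[
        "qa",                                            # built-in correctness evaluator, by name
        RunEvalConfig.Criteria(criteria="conciseness"),  # reference-free criteria evaluator
        RunEvalConfig.ExactMatch(ignore_case=True),      # simple string comparison
    ],
)
```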
""" evaluator_type: EvaluatorType = EvaluatorType.CONTEXT_QA @@ -253,34 +211,22 @@ class RunEvalConfig(BaseModel): prompt: BasePromptTemplate | None = None class JsonValidity(SingleKeyEvalConfig): - """Configuration for a json validity evaluator. - - Parameters - ---------- - """ + """Configuration for a json validity evaluator.""" evaluator_type: EvaluatorType = EvaluatorType.JSON_VALIDITY class JsonEqualityEvaluator(EvalConfig): - """Configuration for a json equality evaluator. - - Parameters - ---------- - """ + """Configuration for a json equality evaluator.""" evaluator_type: EvaluatorType = EvaluatorType.JSON_EQUALITY class ExactMatch(SingleKeyEvalConfig): """Configuration for an exact match string evaluator. - Parameters - ---------- - ignore_case : bool - Whether to ignore case when comparing strings. - ignore_punctuation : bool - Whether to ignore punctuation when comparing strings. - ignore_numbers : bool - Whether to ignore numbers when comparing strings. + Attributes: + ignore_case: Whether to ignore case when comparing strings. + ignore_punctuation: Whether to ignore punctuation when comparing strings. + ignore_numbers: Whether to ignore numbers when comparing strings. """ evaluator_type: EvaluatorType = EvaluatorType.EXACT_MATCH @@ -291,10 +237,8 @@ class RunEvalConfig(BaseModel): class RegexMatch(SingleKeyEvalConfig): """Configuration for a regex match string evaluator. - Parameters - ---------- - flags : int - The flags to pass to the regex. Example: re.IGNORECASE. + Attributes: + flags: The flags to pass to the regex. Example: `re.IGNORECASE`. """ evaluator_type: EvaluatorType = EvaluatorType.REGEX_MATCH @@ -309,17 +253,12 @@ class RunEvalConfig(BaseModel): It is recommended to normalize these scores by setting `normalize_by` to 10. - Parameters - ---------- - criteria : CRITERIA_TYPE | None - The criteria to evaluate. - llm : BaseLanguageModel | None - The language model to use for the evaluation chain. - normalize_by: int | None = None - If you want to normalize the score, the denominator to use. - If not provided, the score will be between 1 and 10 (by default). - prompt : Optional[BasePromptTemplate] - + Attributes: + criteria: The criteria to evaluate. + llm: The language model to use for the evaluation chain. + normalize_by: If you want to normalize the score, the denominator to use. + If not provided, the score will be between 1 and 10. + prompt: The prompt template to use for evaluation. """ evaluator_type: EvaluatorType = EvaluatorType.SCORE_STRING diff --git a/libs/langchain/langchain_classic/smith/evaluation/progress.py b/libs/langchain/langchain_classic/smith/evaluation/progress.py index 42862e3d1af..d0d2e750aa6 100644 --- a/libs/langchain/langchain_classic/smith/evaluation/progress.py +++ b/libs/langchain/langchain_classic/smith/evaluation/progress.py @@ -23,10 +23,10 @@ class ProgressBarCallback(base_callbacks.BaseCallbackHandler): """Initialize the progress bar. Args: - total: int, the total number of items to be processed. - ncols: int, the character width of the progress bar. - end_with: str, last string to print after progress bar reaches end. - **kwargs: additional keyword arguments. + total: The total number of items to be processed. + ncols: The character width of the progress bar. + end_with: Last string to print after progress bar reaches end. + **kwargs: Additional keyword arguments. 
""" self.total = total self.ncols = ncols diff --git a/libs/langchain/langchain_classic/smith/evaluation/runner_utils.py b/libs/langchain/langchain_classic/smith/evaluation/runner_utils.py index d52e037eb8b..e8f25cb2be7 100644 --- a/libs/langchain/langchain_classic/smith/evaluation/runner_utils.py +++ b/libs/langchain/langchain_classic/smith/evaluation/runner_utils.py @@ -153,7 +153,7 @@ class EvalError(dict): """Your architecture raised an error.""" def __init__(self, Error: BaseException, **kwargs: Any) -> None: # noqa: N803 - """Initialize the EvalError with an error and additional attributes. + """Initialize the `EvalError` with an error and additional attributes. Args: Error: The error that occurred. @@ -162,7 +162,7 @@ class EvalError(dict): super().__init__(Error=Error, **kwargs) def __getattr__(self, name: str) -> Any: - """Get an attribute from the EvalError. + """Get an attribute from the `EvalError`. Args: name: The name of the attribute to get. @@ -982,8 +982,7 @@ def _run_llm_or_chain( input_mapper: Optional function to map the input to the expected format. Returns: - Union[List[dict], List[str], List[LLMResult], List[ChatResult]]: - The outputs of the model or chain. + The outputs of the model or chain. """ chain_or_llm = ( "LLM" if isinstance(llm_or_chain_factory, BaseLanguageModel) else "Chain" @@ -1372,7 +1371,7 @@ async def arun_on_dataset( dataset_version: Optional version of the dataset. concurrency_level: The number of async tasks to run concurrently. project_name: Name of the project to store the traces in. - Defaults to {dataset_name}-{chain class name}-{datetime}. + Defaults to `{dataset_name}-{chain class name}-{datetime}`. project_metadata: Optional metadata to add to the project. Useful for storing information the test variant. (prompt version, model version, etc.) @@ -1384,7 +1383,7 @@ async def arun_on_dataset( **kwargs: Should not be used, but is provided for backwards compatibility. Returns: - A dictionary containing the run's project name and the resulting model outputs. + `dict` containing the run's project name and the resulting model outputs. Examples: ```python @@ -1396,9 +1395,9 @@ async def arun_on_dataset( # Chains may have memory. Passing in a constructor function lets the # evaluation framework avoid cross-contamination between runs. def construct_chain(): - llm = ChatOpenAI(temperature=0) + model = ChatOpenAI(temperature=0) chain = LLMChain.from_string( - llm, + model, "What's the answer to {your_input_key}" ) return chain @@ -1424,9 +1423,8 @@ async def arun_on_dataset( evaluation=evaluation_config, ) ``` - You can also create custom evaluators by subclassing the - `StringEvaluator ` - or LangSmith's `RunEvaluator` classes. + You can also create custom evaluators by subclassing the `StringEvaluator or + LangSmith's `RunEvaluator` classes. ```python from typing import Optional @@ -1547,7 +1545,7 @@ def run_on_dataset( dataset_version: Optional version of the dataset. concurrency_level: The number of async tasks to run concurrently. project_name: Name of the project to store the traces in. - Defaults to {dataset_name}-{chain class name}-{datetime}. + Defaults to `{dataset_name}-{chain class name}-{datetime}`. project_metadata: Optional metadata to add to the project. Useful for storing information the test variant. (prompt version, model version, etc.) @@ -1559,7 +1557,7 @@ def run_on_dataset( **kwargs: Should not be used, but is provided for backwards compatibility. 
Returns: - A dictionary containing the run's project name and the resulting model outputs. + `dict` containing the run's project name and the resulting model outputs. Examples: ```python @@ -1571,9 +1569,9 @@ def run_on_dataset( # Chains may have memory. Passing in a constructor function lets the # evaluation framework avoid cross-contamination between runs. def construct_chain(): - llm = ChatOpenAI(temperature=0) + model = ChatOpenAI(temperature=0) chain = LLMChain.from_string( - llm, + model, "What's the answer to {your_input_key}" ) return chain @@ -1600,9 +1598,8 @@ def run_on_dataset( ) ``` - You can also create custom evaluators by subclassing the - `StringEvaluator ` - or LangSmith's `RunEvaluator` classes. + You can also create custom evaluators by subclassing the `StringEvaluator` or + LangSmith's `RunEvaluator` classes. ```python from typing import Optional diff --git a/libs/langchain/langchain_classic/storage/_lc_store.py b/libs/langchain/langchain_classic/storage/_lc_store.py index f4f64939ea9..00a50b15e47 100644 --- a/libs/langchain/langchain_classic/storage/_lc_store.py +++ b/libs/langchain/langchain_classic/storage/_lc_store.py @@ -11,12 +11,12 @@ from langchain_classic.storage.encoder_backed import EncoderBackedStore def _dump_as_bytes(obj: Serializable) -> bytes: - """Return a bytes representation of a document.""" + """Return a bytes representation of a `Document`.""" return dumps(obj).encode("utf-8") def _dump_document_as_bytes(obj: Any) -> bytes: - """Return a bytes representation of a document.""" + """Return a bytes representation of a `Document`.""" if not isinstance(obj, Document): msg = "Expected a Document instance" raise TypeError(msg) @@ -50,14 +50,14 @@ def create_lc_store( *, key_encoder: Callable[[str], str] | None = None, ) -> BaseStore[str, Serializable]: - """Create a store for langchain serializable objects from a bytes store. + """Create a store for LangChain serializable objects from a bytes store. Args: store: A bytes store to use as the underlying store. - key_encoder: A function to encode keys; if None uses identity function. + key_encoder: A function to encode keys; if `None` uses identity function. Returns: - A key-value store for documents. + A key-value store for `Document` objects. """ return EncoderBackedStore( store, @@ -72,17 +72,17 @@ def create_kv_docstore( *, key_encoder: Callable[[str], str] | None = None, ) -> BaseStore[str, Document]: - """Create a store for langchain Document objects from a bytes store. + """Create a store for langchain `Document` objects from a bytes store. This store does run time type checking to ensure that the values are - Document objects. + `Document` objects. Args: store: A bytes store to use as the underlying store. - key_encoder: A function to encode keys; if None uses identity function. + key_encoder: A function to encode keys; if `None`, uses identity function. Returns: - A key-value store for documents. + A key-value store for `Document` objects. """ return EncoderBackedStore( store, diff --git a/libs/langchain/langchain_classic/storage/encoder_backed.py b/libs/langchain/langchain_classic/storage/encoder_backed.py index c3a02acadff..0e5cefb7a82 100644 --- a/libs/langchain/langchain_classic/storage/encoder_backed.py +++ b/libs/langchain/langchain_classic/storage/encoder_backed.py @@ -56,14 +56,29 @@ class EncoderBackedStore(BaseStore[K, V]): value_serializer: Callable[[V], bytes], value_deserializer: Callable[[Any], V], ) -> None: - """Initialize an EncodedStore.""" + """Initialize an `EncodedStore`. 
+ + Args: + store: The underlying byte store to wrap. + key_encoder: Function to encode keys from type `K` to strings. + value_serializer: Function to serialize values from type `V` to bytes. + value_deserializer: Function to deserialize bytes back to type V. + """ self.store = store self.key_encoder = key_encoder self.value_serializer = value_serializer self.value_deserializer = value_deserializer def mget(self, keys: Sequence[K]) -> list[V | None]: - """Get the values associated with the given keys.""" + """Get the values associated with the given keys. + + Args: + keys: A sequence of keys. + + Returns: + A sequence of optional values associated with the keys. + If a key is not found, the corresponding value will be `None`. + """ encoded_keys: list[str] = [self.key_encoder(key) for key in keys] values = self.store.mget(encoded_keys) return [ @@ -72,7 +87,15 @@ class EncoderBackedStore(BaseStore[K, V]): ] async def amget(self, keys: Sequence[K]) -> list[V | None]: - """Get the values associated with the given keys.""" + """Async get the values associated with the given keys. + + Args: + keys: A sequence of keys. + + Returns: + A sequence of optional values associated with the keys. + If a key is not found, the corresponding value will be `None`. + """ encoded_keys: list[str] = [self.key_encoder(key) for key in keys] values = await self.store.amget(encoded_keys) return [ @@ -81,7 +104,11 @@ class EncoderBackedStore(BaseStore[K, V]): ] def mset(self, key_value_pairs: Sequence[tuple[K, V]]) -> None: - """Set the values for the given keys.""" + """Set the values for the given keys. + + Args: + key_value_pairs: A sequence of key-value pairs. + """ encoded_pairs = [ (self.key_encoder(key), self.value_serializer(value)) for key, value in key_value_pairs @@ -89,7 +116,11 @@ class EncoderBackedStore(BaseStore[K, V]): self.store.mset(encoded_pairs) async def amset(self, key_value_pairs: Sequence[tuple[K, V]]) -> None: - """Set the values for the given keys.""" + """Async set the values for the given keys. + + Args: + key_value_pairs: A sequence of key-value pairs. + """ encoded_pairs = [ (self.key_encoder(key), self.value_serializer(value)) for key, value in key_value_pairs @@ -97,12 +128,20 @@ class EncoderBackedStore(BaseStore[K, V]): await self.store.amset(encoded_pairs) def mdelete(self, keys: Sequence[K]) -> None: - """Delete the given keys and their associated values.""" + """Delete the given keys and their associated values. + + Args: + keys: A sequence of keys to delete. + """ encoded_keys = [self.key_encoder(key) for key in keys] self.store.mdelete(encoded_keys) async def amdelete(self, keys: Sequence[K]) -> None: - """Delete the given keys and their associated values.""" + """Async delete the given keys and their associated values. + + Args: + keys: A sequence of keys to delete. + """ encoded_keys = [self.key_encoder(key) for key in keys] await self.store.amdelete(encoded_keys) @@ -111,7 +150,14 @@ class EncoderBackedStore(BaseStore[K, V]): *, prefix: str | None = None, ) -> Iterator[K] | Iterator[str]: - """Get an iterator over keys that match the given prefix.""" + """Get an iterator over keys that match the given prefix. + + Args: + prefix: The prefix to match. + + Yields: + Keys that match the given prefix. + """ # For the time being this does not return K, but str # it's for debugging purposes. Should fix this. 
yield from self.store.yield_keys(prefix=prefix) @@ -121,7 +167,14 @@ class EncoderBackedStore(BaseStore[K, V]): *, prefix: str | None = None, ) -> AsyncIterator[K] | AsyncIterator[str]: - """Get an iterator over keys that match the given prefix.""" + """Async get an iterator over keys that match the given prefix. + + Args: + prefix: The prefix to match. + + Yields: + Keys that match the given prefix. + """ # For the time being this does not return K, but str # it's for debugging purposes. Should fix this. async for key in self.store.ayield_keys(prefix=prefix): diff --git a/libs/langchain/langchain_classic/storage/file_system.py b/libs/langchain/langchain_classic/storage/file_system.py index 226768451ef..67a0a76f5a2 100644 --- a/libs/langchain/langchain_classic/storage/file_system.py +++ b/libs/langchain/langchain_classic/storage/file_system.py @@ -10,10 +10,10 @@ from langchain_classic.storage.exceptions import InvalidKeyException class LocalFileStore(ByteStore): - """BaseStore interface that works on the local file system. + """`BaseStore` interface that works on the local file system. Examples: - Create a LocalFileStore instance and perform operations on it: + Create a `LocalFileStore` instance and perform operations on it: ```python from langchain_classic.storage import LocalFileStore @@ -44,19 +44,18 @@ class LocalFileStore(ByteStore): chmod_dir: int | None = None, update_atime: bool = False, ) -> None: - """Implement the BaseStore interface for the local file system. + """Implement the `BaseStore` interface for the local file system. Args: - root_path (Union[str, Path]): The root path of the file store. All keys are - interpreted as paths relative to this root. - chmod_file: (optional, defaults to `None`) If specified, sets permissions - for newly created files, overriding the current `umask` if needed. - chmod_dir: (optional, defaults to `None`) If specified, sets permissions - for newly created dirs, overriding the current `umask` if needed. - update_atime: (optional, defaults to `False`) If `True`, updates the - filesystem access time (but not the modified time) when a file is read. - This allows MRU/LRU cache policies to be implemented for filesystems - where access time updates are disabled. + root_path: The root path of the file store. All keys are interpreted as + paths relative to this root. + chmod_file: Sets permissions for newly created files, overriding the + current `umask` if needed. + chmod_dir: Sets permissions for newly created dirs, overriding the + current `umask` if needed. + update_atime: Updates the filesystem access time (but not the modified + time) when a file is read. This allows MRU/LRU cache policies to be + implemented for filesystems where access time updates are disabled. """ self.root_path = Path(root_path).absolute() self.chmod_file = chmod_file @@ -67,10 +66,10 @@ class LocalFileStore(ByteStore): """Get the full path for a given key relative to the root path. Args: - key (str): The key relative to the root path. + key: The key relative to the root path. Returns: - Path: The full path for the given key. + The full path for the given key. """ if not re.match(r"^[a-zA-Z0-9_.\-/]+$", key): msg = f"Invalid characters in key: {key}" @@ -94,10 +93,7 @@ class LocalFileStore(ByteStore): whereas the explicit `os.chmod()` used here is not. Args: - dir_path: (Path) The store directory to make - - Returns: - None + dir_path: The store directory to make. 
""" if not dir_path.exists(): self._mkdir_for_store(dir_path.parent) @@ -113,7 +109,7 @@ class LocalFileStore(ByteStore): Returns: A sequence of optional values associated with the keys. - If a key is not found, the corresponding value will be None. + If a key is not found, the corresponding value will be `None`. """ values: list[bytes | None] = [] for key in keys: @@ -133,9 +129,6 @@ class LocalFileStore(ByteStore): Args: key_value_pairs: A sequence of key-value pairs. - - Returns: - None """ for key, value in key_value_pairs: full_path = self._get_full_path(key) @@ -148,10 +141,7 @@ class LocalFileStore(ByteStore): """Delete the given keys and their associated values. Args: - keys (Sequence[str]): A sequence of keys to delete. - - Returns: - None + keys: A sequence of keys to delete. """ for key in keys: full_path = self._get_full_path(key) @@ -162,10 +152,10 @@ class LocalFileStore(ByteStore): """Get an iterator over keys that match the given prefix. Args: - prefix (str | None): The prefix to match. + prefix: The prefix to match. - Returns: - Iterator[str]: An iterator over keys that match the given prefix. + Yields: + Keys that match the given prefix. """ prefix_path = self._get_full_path(prefix) if prefix else self.root_path for file in prefix_path.rglob("*"): diff --git a/libs/langchain/langchain_classic/tools/convert_to_openai.py b/libs/langchain/langchain_classic/tools/convert_to_openai.py index d9f639a382f..1e185e3d248 100644 --- a/libs/langchain/langchain_classic/tools/convert_to_openai.py +++ b/libs/langchain/langchain_classic/tools/convert_to_openai.py @@ -1,4 +1,6 @@ -from langchain_core.utils.function_calling import format_tool_to_openai_function +from langchain_core.utils.function_calling import ( + convert_to_openai_function as format_tool_to_openai_function, +) # For backwards compatibility __all__ = ["format_tool_to_openai_function"] diff --git a/libs/langchain/langchain_classic/tools/jira/tool.py b/libs/langchain/langchain_classic/tools/jira/tool.py index f9216f08a42..e5f70b14187 100644 --- a/libs/langchain/langchain_classic/tools/jira/tool.py +++ b/libs/langchain/langchain_classic/tools/jira/tool.py @@ -27,10 +27,10 @@ def __getattr__(name: str) -> Any: """Dynamically retrieve attributes from the updated module path. Args: - name (str): The name of the attribute to import. + name: The name of the attribute to import. Returns: - Any: The resolved attribute from the updated path. + The resolved attribute from the updated path. """ return _import_attribute(name) diff --git a/libs/langchain/langchain_classic/tools/json/tool.py b/libs/langchain/langchain_classic/tools/json/tool.py index ca85daed0a5..2ff1e4b8444 100644 --- a/libs/langchain/langchain_classic/tools/json/tool.py +++ b/libs/langchain/langchain_classic/tools/json/tool.py @@ -35,10 +35,10 @@ def __getattr__(name: str) -> Any: at runtime and forward them to their new locations. Args: - name (str): The name of the attribute to import. + name: The name of the attribute to import. Returns: - Any: The resolved attribute from the appropriate updated module. + The resolved attribute from the appropriate updated module. 
""" return _import_attribute(name) diff --git a/libs/langchain/langchain_classic/tools/render.py b/libs/langchain/langchain_classic/tools/render.py index cbe6e2bb0e9..50604080499 100644 --- a/libs/langchain/langchain_classic/tools/render.py +++ b/libs/langchain/langchain_classic/tools/render.py @@ -11,8 +11,10 @@ from langchain_core.tools import ( render_text_description_and_args, ) from langchain_core.utils.function_calling import ( - format_tool_to_openai_function, - format_tool_to_openai_tool, + convert_to_openai_function as format_tool_to_openai_function, +) +from langchain_core.utils.function_calling import ( + convert_to_openai_tool as format_tool_to_openai_tool, ) __all__ = [ diff --git a/libs/langchain/langchain_classic/tools/zapier/tool.py b/libs/langchain/langchain_classic/tools/zapier/tool.py index 1249bb8bcf3..d04673abe6b 100644 --- a/libs/langchain/langchain_classic/tools/zapier/tool.py +++ b/libs/langchain/langchain_classic/tools/zapier/tool.py @@ -33,10 +33,10 @@ def __getattr__(name: str) -> Any: at runtime and forward them to their new locations. Args: - name (str): The name of the attribute to import. + name: The name of the attribute to import. Returns: - Any: The resolved attribute from the appropriate updated module. + The resolved attribute from the appropriate updated module. """ return _import_attribute(name) diff --git a/libs/langchain/langchain_classic/utils/__init__.py b/libs/langchain/langchain_classic/utils/__init__.py index 533d8db4ed7..0777d743a35 100644 --- a/libs/langchain/langchain_classic/utils/__init__.py +++ b/libs/langchain/langchain_classic/utils/__init__.py @@ -1,4 +1,4 @@ -"""**Utility functions** for LangChain. +"""Utility functions for LangChain. These functions do not depend on any other LangChain module. 
""" diff --git a/libs/langchain/langchain_classic/utils/openai_functions.py b/libs/langchain/langchain_classic/utils/openai_functions.py index 6e093c35d6f..0e21c36857f 100644 --- a/libs/langchain/langchain_classic/utils/openai_functions.py +++ b/libs/langchain/langchain_classic/utils/openai_functions.py @@ -1,8 +1,9 @@ +from langchain_core.utils.function_calling import FunctionDescription, ToolDescription from langchain_core.utils.function_calling import ( - FunctionDescription, - ToolDescription, - convert_pydantic_to_openai_function, - convert_pydantic_to_openai_tool, + convert_to_openai_function as convert_pydantic_to_openai_function, +) +from langchain_core.utils.function_calling import ( + convert_to_openai_tool as convert_pydantic_to_openai_tool, ) __all__ = [ diff --git a/libs/langchain/langchain_classic/vectorstores/__init__.py b/libs/langchain/langchain_classic/vectorstores/__init__.py index 6b61ff01cb1..74882a4987b 100644 --- a/libs/langchain/langchain_classic/vectorstores/__init__.py +++ b/libs/langchain/langchain_classic/vectorstores/__init__.py @@ -71,7 +71,6 @@ if TYPE_CHECKING: SupabaseVectorStore, Tair, TencentVectorDB, - Tigris, TileDB, TimescaleVector, Typesense, @@ -149,7 +148,6 @@ DEPRECATED_LOOKUP = { "SupabaseVectorStore": "langchain_community.vectorstores", "Tair": "langchain_community.vectorstores", "TencentVectorDB": "langchain_community.vectorstores", - "Tigris": "langchain_community.vectorstores", "TileDB": "langchain_community.vectorstores", "TimescaleVector": "langchain_community.vectorstores", "Typesense": "langchain_community.vectorstores", @@ -231,7 +229,6 @@ __all__ = [ "SupabaseVectorStore", "Tair", "TencentVectorDB", - "Tigris", "TileDB", "TimescaleVector", "Typesense", diff --git a/libs/langchain/langchain_classic/vectorstores/tigris.py b/libs/langchain/langchain_classic/vectorstores/tigris.py deleted file mode 100644 index 8ba9f5af3cc..00000000000 --- a/libs/langchain/langchain_classic/vectorstores/tigris.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import TYPE_CHECKING, Any - -from langchain_classic._api import create_importer - -if TYPE_CHECKING: - from langchain_community.vectorstores import Tigris - -# Create a way to dynamically look up deprecated imports. -# Used to consolidate logic for raising deprecation warnings and -# handling optional imports. 
-DEPRECATED_LOOKUP = {"Tigris": "langchain_community.vectorstores"} - -_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP) - - -def __getattr__(name: str) -> Any: - """Look up attributes dynamically.""" - return _import_attribute(name) - - -__all__ = [ - "Tigris", -] diff --git a/libs/langchain/pyproject.toml b/libs/langchain/pyproject.toml index ddd7e2963cd..f259e7d7044 100644 --- a/libs/langchain/pyproject.toml +++ b/libs/langchain/pyproject.toml @@ -3,12 +3,17 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-classic" +description = "Building applications with LLMs through composability" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.0" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", - "langchain-text-splitters>=1.0.0a1,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", + "langchain-text-splitters>=1.0.0,<2.0.0", "langsmith>=0.1.17,<1.0.0", "pydantic>=2.7.4,<3.0.0", "SQLAlchemy>=1.4.0,<3.0.0", @@ -16,10 +21,6 @@ dependencies = [ "PyYAML>=5.3.0,<7.0.0", "async-timeout>=4.0.0,<5.0.0; python_version < \"3.11\"", ] -name = "langchain-classic" -version = "1.0.0a1" -description = "Building applications with LLMs through composability" -readme = "README.md" [project.optional-dependencies] #community = ["langchain-community"] @@ -33,7 +34,7 @@ fireworks = ["langchain-fireworks"] ollama = ["langchain-ollama"] together = ["langchain-together"] mistralai = ["langchain-mistralai"] -#huggingface = ["langchain-huggingface"] +huggingface = ["langchain-huggingface"] groq = ["langchain-groq"] aws = ["langchain-aws"] deepseek = ["langchain-deepseek"] @@ -41,12 +42,13 @@ xai = ["langchain-xai"] perplexity = ["langchain-perplexity"] [project.urls] -homepage = "https://docs.langchain.com/" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain" -changelog = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-classic%3D%3D1%22" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/" +Documentation = "https://reference.langchain.com/python/langchain_classic/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-classic%3D%3D1%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ @@ -62,7 +64,6 @@ test = [ "numpy>=2.1.0; python_version>='3.13'", "cffi<1.17.1; python_version < \"3.10\"", "cffi; python_version >= \"3.10\"", - "duckdb-engine>=0.9.2,<1.0.0", "freezegun>=1.2.2,<2.0.0", "responses>=0.22.0,<1.0.0", "lark>=1.1.5,<2.0.0", @@ -153,6 +154,7 @@ ignore = [ "TC003", # Doesn't play well with Pydantic "TD002", # Missing author in TODO "TD003", # Missing issue link in TODO + "RUF002", # Em-dash in docstring # TODO rules "ANN401", # No type Any diff --git a/libs/langchain/scripts/lint_imports.sh b/libs/langchain/scripts/lint_imports.sh index 1ab9dd7a733..b8cb03b69bc 100755 --- a/libs/langchain/scripts/lint_imports.sh +++ b/libs/langchain/scripts/lint_imports.sh @@ -6,23 +6,23 @@ set -eu errors=0 # Check the conditions -git grep '^from langchain import' langchain | grep -vE 'from langchain import (__version__|hub)' && errors=$((errors+1)) -git grep '^from 
langchain\.' langchain/pydantic_v1 | grep -vE 'from langchain.(pydantic_v1|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/load | grep -vE 'from langchain.(pydantic_v1|load|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/utils | grep -vE 'from langchain.(pydantic_v1|utils|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/schema | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|env|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/adapters | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/callbacks | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|_api)' && errors=$((errors+1)) +git grep '^from langchain import' langchain_classic | grep -vE 'from langchain import (__version__|hub)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/pydantic_v1 | grep -vE 'from langchain.(pydantic_v1|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/load | grep -vE 'from langchain.(pydantic_v1|load|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/utils | grep -vE 'from langchain.(pydantic_v1|utils|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/schema | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|env|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/adapters | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/callbacks | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|_api)' && errors=$((errors+1)) # TODO: it's probably not amazing so that so many other modules depend on `langchain_community.utilities`, because there can be a lot of imports there -git grep '^from langchain\.' langchain/utilities | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|utilities|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/storage | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|storage|utilities|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/prompts | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|prompts|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/output_parsers | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|prompts|_api|output_parsers|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/llms | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|prompts|llms|utilities|globals|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/chat_models | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|llms|prompts|adapters|chat_models|utilities|globals|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/embeddings | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|storage|llms|embeddings|utilities|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/docstore | grep -vE 'from langchain.(pydantic_v1|utils|schema|docstore|_api)' && errors=$((errors+1)) -git grep '^from langchain\.' langchain/vectorstores | grep -vE 'from +git grep '^from langchain\.' 
langchain_classic/utilities | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|utilities|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/storage | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|storage|utilities|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/prompts | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|prompts|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/output_parsers | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|prompts|_api|output_parsers|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/llms | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|prompts|llms|utilities|globals|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/chat_models | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|llms|prompts|adapters|chat_models|utilities|globals|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/embeddings | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|storage|llms|embeddings|utilities|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/docstore | grep -vE 'from langchain.(pydantic_v1|utils|schema|docstore|_api)' && errors=$((errors+1)) +git grep '^from langchain\.' langchain_classic/vectorstores | grep -vE 'from langchain.(pydantic_v1|utils|schema|load|callbacks|env|_api|storage|llms|docstore|vectorstores|utilities|_api)' && errors=$((errors+1)) # make sure not importing from langchain_experimental git --no-pager grep '^from langchain_experimental\.' . && errors=$((errors+1)) diff --git a/libs/langchain/tests/integration_tests/chat_models/test_base.py b/libs/langchain/tests/integration_tests/chat_models/test_base.py index eddc6cb45bb..07c03b725ef 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_base.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_base.py @@ -25,7 +25,7 @@ async def test_init_chat_model_chain() -> None: model_with_config = model_with_tools.with_config( RunnableConfig(tags=["foo"]), - configurable={"bar_model": "claude-3-7-sonnet-20250219"}, + configurable={"bar_model": "claude-sonnet-4-5-20250929"}, ) prompt = ChatPromptTemplate.from_messages([("system", "foo"), ("human", "{input}")]) chain = prompt | model_with_config diff --git a/libs/langchain/tests/unit_tests/agents/test_agent.py b/libs/langchain/tests/unit_tests/agents/test_agent.py index 072a3c7faf7..8cbcb198560 100644 --- a/libs/langchain/tests/unit_tests/agents/test_agent.py +++ b/libs/langchain/tests/unit_tests/agents/test_agent.py @@ -450,7 +450,7 @@ def test_agent_invalid_tool() -> None: async def test_runnable_agent() -> None: - """Simple test to verify that an agent built with LCEL works.""" + """Simple test to verify that an agent built via composition works.""" # Will alternate between responding with hello and goodbye infinite_cycle = cycle([AIMessage(content="hello world!")]) # When streaming GenericFakeChatModel breaks AIMessage into chunks based on spaces diff --git a/libs/langchain/tests/unit_tests/chains/test_base.py b/libs/langchain/tests/unit_tests/chains/test_base.py index a0e21fab2b6..a607eada5b9 100644 --- a/libs/langchain/tests/unit_tests/chains/test_base.py +++ b/libs/langchain/tests/unit_tests/chains/test_base.py @@ -6,10 +6,10 @@ from typing import Any import pytest from 
langchain_core.callbacks.manager import CallbackManagerForChainRun -from langchain_core.memory import BaseMemory from langchain_core.tracers.context import collect_runs from typing_extensions import override +from langchain_classic.base_memory import BaseMemory from langchain_classic.chains.base import Chain from langchain_classic.schema import RUN_KEY from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler diff --git a/libs/langchain/tests/unit_tests/chains/test_conversation.py b/libs/langchain/tests/unit_tests/chains/test_conversation.py index 0913204b77e..7ff07f45da6 100644 --- a/libs/langchain/tests/unit_tests/chains/test_conversation.py +++ b/libs/langchain/tests/unit_tests/chains/test_conversation.py @@ -6,10 +6,10 @@ from typing import Any import pytest from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.language_models import LLM -from langchain_core.memory import BaseMemory from langchain_core.prompts.prompt import PromptTemplate from typing_extensions import override +from langchain_classic.base_memory import BaseMemory from langchain_classic.chains.conversation.base import ConversationChain from langchain_classic.memory.buffer import ConversationBufferMemory from langchain_classic.memory.buffer_window import ConversationBufferWindowMemory diff --git a/libs/langchain/tests/unit_tests/chains/test_memory.py b/libs/langchain/tests/unit_tests/chains/test_memory.py index 2be4384c397..0894d5f92df 100644 --- a/libs/langchain/tests/unit_tests/chains/test_memory.py +++ b/libs/langchain/tests/unit_tests/chains/test_memory.py @@ -1,6 +1,6 @@ import pytest -from langchain_core.memory import BaseMemory +from langchain_classic.base_memory import BaseMemory from langchain_classic.chains.conversation.memory import ( ConversationBufferMemory, ConversationBufferWindowMemory, diff --git a/libs/langchain/tests/unit_tests/chat_models/test_base.py b/libs/langchain/tests/unit_tests/chat_models/test_base.py index 400deabe9c6..2b769aa4a38 100644 --- a/libs/langchain/tests/unit_tests/chat_models/test_base.py +++ b/libs/langchain/tests/unit_tests/chat_models/test_base.py @@ -32,7 +32,7 @@ def test_all_imports() -> None: ("model_name", "model_provider"), [ ("gpt-4o", "openai"), - ("claude-3-opus-20240229", "anthropic"), + ("claude-opus-4-1", "anthropic"), ("accounts/fireworks/models/mixtral-8x7b-instruct", "fireworks"), ("mixtral-8x7b-32768", "groq"), ], @@ -241,17 +241,17 @@ def test_configurable_with_default() -> None: model_with_config = model_with_tools.with_config( RunnableConfig(tags=["foo"]), - configurable={"bar_model": "claude-3-7-sonnet-20250219"}, + configurable={"bar_model": "claude-sonnet-4-5-20250929"}, ) - assert model_with_config.model == "claude-3-7-sonnet-20250219" # type: ignore[attr-defined] + assert model_with_config.model == "claude-sonnet-4-5-20250929" # type: ignore[attr-defined] assert model_with_config.model_dump() == { # type: ignore[attr-defined] "name": None, "bound": { "name": None, "disable_streaming": False, - "model": "claude-3-7-sonnet-20250219", + "model": "claude-sonnet-4-5-20250929", "mcp_servers": None, "max_tokens": 64000, "temperature": None, diff --git a/libs/langchain/tests/unit_tests/embeddings/test_imports.py b/libs/langchain/tests/unit_tests/embeddings/test_imports.py index 1a2d86854a6..0b65208e43d 100644 --- a/libs/langchain/tests/unit_tests/embeddings/test_imports.py +++ b/libs/langchain/tests/unit_tests/embeddings/test_imports.py @@ -11,6 +11,7 @@ EXPECTED_ALL = [ "FastEmbedEmbeddings", 
"HuggingFaceEmbeddings", "HuggingFaceInferenceAPIEmbeddings", + "HypotheticalDocumentEmbedder", "InfinityEmbeddings", "GradientEmbeddings", "JinaEmbeddings", diff --git a/libs/langchain/tests/unit_tests/indexes/test_indexing.py b/libs/langchain/tests/unit_tests/indexes/test_indexing.py index ebc87aee00f..e3d5c85c37d 100644 --- a/libs/langchain/tests/unit_tests/indexes/test_indexing.py +++ b/libs/langchain/tests/unit_tests/indexes/test_indexing.py @@ -446,7 +446,7 @@ def test_incremental_fails_with_bad_source_ids( with pytest.raises( ValueError, - match="Source ids are required when cleanup mode is incremental or scoped_full", + match="Source IDs are required when cleanup mode is incremental or scoped_full", ): # Should raise an error because no source id function was specified index( @@ -496,7 +496,7 @@ async def test_aincremental_fails_with_bad_source_ids( with pytest.raises( ValueError, - match="Source ids are required when cleanup mode is incremental or scoped_full", + match="Source IDs are required when cleanup mode is incremental or scoped_full", ): # Should raise an error because no source id function was specified await aindex( diff --git a/libs/langchain/tests/unit_tests/test_dependencies.py b/libs/langchain/tests/unit_tests/test_dependencies.py index 7b9e12f0e4d..9bf5dcf95f8 100644 --- a/libs/langchain/tests/unit_tests/test_dependencies.py +++ b/libs/langchain/tests/unit_tests/test_dependencies.py @@ -57,7 +57,6 @@ def test_test_group_dependencies(uv_conf: Mapping[str, Any]) -> None: assert sorted(test_group_deps) == sorted( [ - "duckdb-engine", "freezegun", "langchain-core", "langchain-tests", diff --git a/libs/langchain/tests/unit_tests/test_imports.py b/libs/langchain/tests/unit_tests/test_imports.py index f13d1e18f32..dc964846f01 100644 --- a/libs/langchain/tests/unit_tests/test_imports.py +++ b/libs/langchain/tests/unit_tests/test_imports.py @@ -96,7 +96,7 @@ def test_no_more_changes_to_proxy_community() -> None: # most cases. hash_ += len(str(sorted(deprecated_lookup.items()))) - evil_magic_number = 38620 + evil_magic_number = 38644 assert hash_ == evil_magic_number, ( "If you're triggering this test, you're likely adding a new import " @@ -108,15 +108,15 @@ def test_no_more_changes_to_proxy_community() -> None: def extract_deprecated_lookup(file_path: str) -> dict[str, Any] | None: - """Detect and extracts the value of a dictionary named DEPRECATED_LOOKUP. + """Detect and extracts the value of a dictionary named `DEPRECATED_LOOKUP`. This variable is located in the global namespace of a Python file. Args: - file_path (str): The path to the Python file. + file_path: The path to the Python file. Returns: - dict or None: The value of DEPRECATED_LOOKUP if it exists, None otherwise. + The value of `DEPRECATED_LOOKUP` if it exists, `None` otherwise. """ tree = ast.parse(Path(file_path).read_text(encoding="utf-8"), filename=file_path) @@ -136,10 +136,10 @@ def _dict_from_ast(node: ast.Dict) -> dict[str, str]: """Convert an AST dict node to a Python dictionary, assuming str to str format. Args: - node (ast.Dict): The AST node representing a dictionary. + node: The AST node representing a dictionary. Returns: - dict: The corresponding Python dictionary. + The corresponding Python dictionary. """ result: dict[str, str] = {} for key, value in zip(node.keys, node.values, strict=False): @@ -153,10 +153,10 @@ def _literal_eval_str(node: ast.AST) -> str: """Evaluate an AST literal node to its corresponding string value. Args: - node (ast.AST): The AST node representing a literal value. 
+ node: The AST node representing a literal value. Returns: - str: The corresponding string value. + The corresponding string value. """ if isinstance(node, ast.Constant) and isinstance(node.value, str): return node.value diff --git a/libs/langchain/tests/unit_tests/utils/test_openai_functions.py b/libs/langchain/tests/unit_tests/utils/test_openai_functions.py index ffe68e64ffa..570cd4d351d 100644 --- a/libs/langchain/tests/unit_tests/utils/test_openai_functions.py +++ b/libs/langchain/tests/unit_tests/utils/test_openai_functions.py @@ -1,4 +1,4 @@ -from langchain_core.utils.function_calling import convert_pydantic_to_openai_function +from langchain_core.utils.function_calling import convert_to_openai_function from pydantic import BaseModel, Field @@ -9,7 +9,7 @@ def test_convert_pydantic_to_openai_function() -> None: key: str = Field(..., description="API key") days: int = Field(default=0, description="Number of days to forecast") - actual = convert_pydantic_to_openai_function(Data) + actual = convert_to_openai_function(Data) expected = { "name": "Data", "description": "The data to return.", @@ -41,7 +41,7 @@ def test_convert_pydantic_to_openai_function_nested() -> None: data: Data - actual = convert_pydantic_to_openai_function(Model) + actual = convert_to_openai_function(Model) expected = { "name": "Model", "description": "The model to return.", diff --git a/libs/langchain/tests/unit_tests/vectorstores/test_public_api.py b/libs/langchain/tests/unit_tests/vectorstores/test_public_api.py index c05bd2a95bd..d639bba807b 100644 --- a/libs/langchain/tests/unit_tests/vectorstores/test_public_api.py +++ b/libs/langchain/tests/unit_tests/vectorstores/test_public_api.py @@ -61,7 +61,6 @@ _EXPECTED = [ "SupabaseVectorStore", "Tair", "TencentVectorDB", - "Tigris", "TileDB", "TimescaleVector", "Typesense", diff --git a/libs/langchain/uv.lock b/libs/langchain/uv.lock index 933859e051c..1b9d095dce7 100644 --- a/libs/langchain/uv.lock +++ b/libs/langchain/uv.lock @@ -950,52 +950,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, ] -[[package]] -name = "duckdb" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/82/93/adc0d183642fc9a602ca9b97cb16754c84b8c1d92e5b99aec412e0c419a8/duckdb-1.4.0.tar.gz", hash = "sha256:bd5edee8bd5a73b5822f2b390668597b5fcdc2d3292c244d8d933bb87ad6ac4c", size = 18453175, upload-time = "2025-09-16T10:22:41.509Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/4a/b2e17dbe2953481b084f355f162ed319a67ef760e28794c6870058583aec/duckdb-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e24e981a6c87e299201694b9bb24fff0beb04ccad399fca6f13072a59814488f", size = 31293005, upload-time = "2025-09-16T10:21:28.296Z" }, - { url = "https://files.pythonhosted.org/packages/a9/89/e34ed03cce7e35b83c1f056126aa4e8e8097eb93e7324463020f85d5cbfa/duckdb-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db500ef2c8cb7dc1ca078740ecf1dceaa20d3f5dc5bce269be45d5cff4170c0f", size = 17288207, upload-time = "2025-09-16T10:21:31.129Z" }, - { url = "https://files.pythonhosted.org/packages/f8/17/7ff24799ee98c4dbb177c3ec6c93e38e9513828785c31757c727b47ad71e/duckdb-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:a65739b8a7106634e6e77d0e110fc5e057b88edc9df6cb1683d499a1e5aa3177", size = 14817523, upload-time = "2025-09-16T10:21:33.397Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ab/7a482a76ff75212b5cf4f2172a802f2a59b4ab096416e5821aa62a305bc4/duckdb-1.4.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d59f7be24862adb803a1ddfc9c3b8cb09e6005bca0c9c6f7c631a1da1c3aa0c", size = 18410654, upload-time = "2025-09-16T10:21:35.864Z" }, - { url = "https://files.pythonhosted.org/packages/1e/f6/a235233b973652b31448b6d600604620d02fc552b90ab94ca7f645fd5ac0/duckdb-1.4.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d052a87e9edf4eb3bab0b7a6ac995676018c6083b8049421628dfa3b983a2d4", size = 20399121, upload-time = "2025-09-16T10:21:38.524Z" }, - { url = "https://files.pythonhosted.org/packages/b1/cf/63fedb74d00d7c4e19ffc73a1d8d98ee8d3d6498cf2865509c104aa8e799/duckdb-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:0329b81e587f745b2fc6f3a488ea3188b0f029c3b5feef43792a25eaac84ac01", size = 12283288, upload-time = "2025-09-16T10:21:40.732Z" }, - { url = "https://files.pythonhosted.org/packages/60/e9/b29cc5bceac52e049b20d613551a2171a092df07f26d4315f3f9651c80d4/duckdb-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6505fed1ccae8df9f574e744c48fa32ee2feaeebe5346c2daf4d4d10a8dac5aa", size = 31290878, upload-time = "2025-09-16T10:21:43.256Z" }, - { url = "https://files.pythonhosted.org/packages/1f/68/d88a15dba48bf6a4b33f1be5097ef45c83f7b9e97c854cc638a85bb07d70/duckdb-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36974a04b29c74ac2143457e95420a7422016d050e28573060b89a90b9cf2b57", size = 17288823, upload-time = "2025-09-16T10:21:45.716Z" }, - { url = "https://files.pythonhosted.org/packages/8c/7e/e3d2101dc6bbd60f2b3c1d748351ff541fc8c48790ac1218c0199cb930f6/duckdb-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:90484b896e5059f145d1facfabea38e22c54a2dcc2bd62dd6c290423f0aee258", size = 14819684, upload-time = "2025-09-16T10:21:48.117Z" }, - { url = "https://files.pythonhosted.org/packages/c4/bb/4ec8e4d03cb5b77d75b9ee0057c2c714cffaa9bda1e55ffec833458af0a3/duckdb-1.4.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a969d624b385853b31a43b0a23089683297da2f14846243921c6dbec8382d659", size = 18410075, upload-time = "2025-09-16T10:21:50.517Z" }, - { url = "https://files.pythonhosted.org/packages/ec/21/e896616d892d50dc1e0c142428e9359b483d4dd6e339231d822e57834ad3/duckdb-1.4.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5935644f96a75e9f6f3c3eeb3da14cdcaf7bad14d1199c08439103decb29466a", size = 20402984, upload-time = "2025-09-16T10:21:52.808Z" }, - { url = "https://files.pythonhosted.org/packages/c4/c0/b5eb9497e4a9167d23fbad745969eaa36e28d346648e17565471892d1b33/duckdb-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:300aa0e963af97969c38440877fffd576fc1f49c1f5914789a9d01f2fe7def91", size = 12282971, upload-time = "2025-09-16T10:21:55.314Z" }, - { url = "https://files.pythonhosted.org/packages/e8/6d/0c774d6af1aed82dbe855d266cb000a1c09ea31ed7d6c3a79e2167a38e7a/duckdb-1.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:18b3a048fca6cc7bafe08b10e1b0ab1509d7a0381ffb2c70359e7dc56d8a705d", size = 31307425, upload-time = "2025-09-16T10:21:57.83Z" }, - { url = "https://files.pythonhosted.org/packages/d3/c0/1fd7b7b2c0c53d8d748d2f28ea9096df5ee9dc39fa736cca68acabe69656/duckdb-1.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:2c1271cb85aeacccfd0b1284e816280a7450df1dd4dd85ccb2848563cfdf90e9", size = 17295727, upload-time = "2025-09-16T10:22:02.242Z" }, - { url = "https://files.pythonhosted.org/packages/98/d3/4d4c4bd667b7ada5f6c207c2f127591ebb8468333f207f8f10ff0532578e/duckdb-1.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55064dd2e25711eeaa6a72c25405bdd7994c81a3221657e94309a2faf65d25a6", size = 14826879, upload-time = "2025-09-16T10:22:05.162Z" }, - { url = "https://files.pythonhosted.org/packages/b0/48/e0c1b97d76fb7567c53db5739931323238fad54a642707008104f501db37/duckdb-1.4.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0536d7c81bc506532daccf373ddbc8c6add46aeb70ef3cd5ee70ad5c2b3165ea", size = 18417856, upload-time = "2025-09-16T10:22:07.919Z" }, - { url = "https://files.pythonhosted.org/packages/12/78/297b838f3b9511589badc8f472f70b31cf3bbf9eb99fa0a4d6e911d3114a/duckdb-1.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:784554e3ddfcfc5c5c7b1aa1f9925fedb7938f6628729adba48f7ea37554598f", size = 20427154, upload-time = "2025-09-16T10:22:10.216Z" }, - { url = "https://files.pythonhosted.org/packages/ea/57/500d251b886494f6c52d56eeab8a1860572ee62aed05d7d50c71ba2320f3/duckdb-1.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:c5d2aa4d6981f525ada95e6db41bb929403632bb5ff24bd6d6dd551662b1b613", size = 12290108, upload-time = "2025-09-16T10:22:12.668Z" }, - { url = "https://files.pythonhosted.org/packages/2f/64/ee22b2b8572746e1523143b9f28d606575782e0204de5020656a1d15dd14/duckdb-1.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d94d010a09b1a62d9021a2a71cf266188750f3c9b1912ccd6afe104a6ce8010", size = 31307662, upload-time = "2025-09-16T10:22:14.9Z" }, - { url = "https://files.pythonhosted.org/packages/76/2e/4241cd00046ca6b781bd1d9002e8223af061e85d1cc21830aa63e7a7db7c/duckdb-1.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c61756fa8b3374627e5fa964b8e0d5b58e364dce59b87dba7fb7bc6ede196b26", size = 17295617, upload-time = "2025-09-16T10:22:17.239Z" }, - { url = "https://files.pythonhosted.org/packages/f7/98/5ab136bc7b12ac18580350a220db7c00606be9eac2d89de259cce733f64c/duckdb-1.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e70d7d9881ea2c0836695de70ea68c970e18a2856ba3d6502e276c85bd414ae7", size = 14826727, upload-time = "2025-09-16T10:22:19.415Z" }, - { url = "https://files.pythonhosted.org/packages/23/32/57866cf8881288b3dfb9212720221fb890daaa534dbdc6fe3fff3979ecd1/duckdb-1.4.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2de258a93435c977a0ec3a74ec8f60c2f215ddc73d427ee49adc4119558facd3", size = 18421289, upload-time = "2025-09-16T10:22:21.564Z" }, - { url = "https://files.pythonhosted.org/packages/a0/83/7438fb43be451a7d4a04650aaaf662b2ff2d95895bbffe3e0e28cbe030c9/duckdb-1.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6d3659641d517dd9ed1ab66f110cdbdaa6900106f116effaf2dbedd83c38de3", size = 20426547, upload-time = "2025-09-16T10:22:23.759Z" }, - { url = "https://files.pythonhosted.org/packages/21/b2/98fb89ae81611855f35984e96f648d871f3967bb3f524b51d1372d052f0c/duckdb-1.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:07fcc612ea5f0fe6032b92bcc93693034eb00e7a23eb9146576911d5326af4f7", size = 12290467, upload-time = "2025-09-16T10:22:25.923Z" }, -] - -[[package]] -name = "duckdb-engine" -version = "0.17.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "duckdb" }, - { name = "packaging" }, - { name = "sqlalchemy" }, -] -sdist = { 
url = "https://files.pythonhosted.org/packages/89/d5/c0d8d0a4ca3ffea92266f33d92a375e2794820ad89f9be97cf0c9a9697d0/duckdb_engine-0.17.0.tar.gz", hash = "sha256:396b23869754e536aa80881a92622b8b488015cf711c5a40032d05d2cf08f3cf", size = 48054, upload-time = "2025-03-29T09:49:17.663Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/a2/e90242f53f7ae41554419b1695b4820b364df87c8350aa420b60b20cab92/duckdb_engine-0.17.0-py3-none-any.whl", hash = "sha256:3aa72085e536b43faab635f487baf77ddc5750069c16a2f8d9c6c3cb6083e979", size = 49676, upload-time = "2025-03-29T09:49:15.564Z" }, -] - [[package]] name = "exceptiongroup" version = "1.3.0" @@ -1467,6 +1421,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d", size = 584358, upload-time = "2025-08-07T13:18:23.708Z" }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5", size = 1113550, upload-time = "2025-08-07T13:42:37.467Z" }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f", size = 1137126, upload-time = "2025-08-07T13:18:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7", size = 1544904, upload-time = "2025-11-04T12:42:04.763Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8", size = 1611228, upload-time = "2025-11-04T12:42:08.423Z" }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, @@ -1476,6 +1432,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, 
upload-time = "2025-08-07T13:18:25.164Z" }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, @@ -1485,6 +1443,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + 
{ url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, @@ -1494,6 +1454,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, { url = 
"https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, @@ -1501,6 +1463,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, ] @@ -2299,7 +2263,7 @@ wheels = [ [[package]] name = "langchain-classic" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." 
}
dependencies = [
    { name = "async-timeout", marker = "python_full_version < '3.11'" },
@@ -2334,6 +2298,9 @@ google-vertexai = [
groq = [
    { name = "langchain-groq" },
]
+huggingface = [
+    { name = "langchain-huggingface" },
+]
mistralai = [
    { name = "langchain-mistralai" },
]
@@ -2368,7 +2335,6 @@ lint = [
test = [
    { name = "blockbuster" },
    { name = "cffi" },
-    { name = "duckdb-engine" },
    { name = "freezegun" },
    { name = "langchain-core" },
    { name = "langchain-openai" },
@@ -2428,6 +2394,7 @@ requires-dist = [
    { name = "langchain-google-genai", marker = "extra == 'google-genai'" },
    { name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" },
    { name = "langchain-groq", marker = "extra == 'groq'" },
+    { name = "langchain-huggingface", marker = "extra == 'huggingface'" },
    { name = "langchain-mistralai", marker = "extra == 'mistralai'" },
    { name = "langchain-ollama", marker = "extra == 'ollama'" },
    { name = "langchain-openai", marker = "extra == 'openai'", editable = "../partners/openai" },
@@ -2441,7 +2408,7 @@ requires-dist = [
    { name = "requests", specifier = ">=2.0.0,<3.0.0" },
    { name = "sqlalchemy", specifier = ">=1.4.0,<3.0.0" },
]
-provides-extras = ["anthropic", "openai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "groq", "aws", "deepseek", "xai", "perplexity"]
+provides-extras = ["anthropic", "openai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"]

[package.metadata.requires-dev]
dev = [
@@ -2460,7 +2427,6 @@ test = [
    { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" },
    { name = "cffi", marker = "python_full_version < '3.10'", specifier = "<1.17.1" },
    { name = "cffi", marker = "python_full_version >= '3.10'" },
-    { name = "duckdb-engine", specifier = ">=0.9.2,<1.0.0" },
    { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
    { name = "langchain-core", editable = "../core" },
    { name = "langchain-openai", editable = "../partners/openai" },
@@ -2512,7 +2478,7 @@ typing = [

[[package]]
name = "langchain-core"
-version = "1.0.0a8"
+version = "1.0.3"
source = { editable = "../core" }
dependencies = [
    { name = "jsonpatch" },
@@ -2546,6 +2512,7 @@ test = [
    { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" },
    { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
    { name = "grandalf", specifier = ">=0.8.0,<1.0.0" },
+    { name = "langchain-model-profiles", directory = "../model-profiles" },
    { name = "langchain-tests", directory = "../standard-tests" },
    { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" },
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
@@ -2562,6 +2529,7 @@ test = [
]
test-integration = []
typing = [
+    { name = "langchain-model-profiles", directory = "../model-profiles" },
    { name = "langchain-text-splitters", directory = "../text-splitters" },
    { name = "mypy", specifier = ">=1.18.1,<1.19.0" },
    { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" },
@@ -2646,6 +2614,20 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d7/7c/3bbe58c4da4fd2e86ac35d7305027e1fe9cba49e0d7a9271ae90ea21b47a/langchain_groq-1.0.0a1-py3-none-any.whl", hash = "sha256:2067e25f2be394a5cde6270d047757c2feb622f9f419d704778c355ce8d9d084", size = 17516, upload-time = "2025-10-02T23:21:33.892Z" },
]

+[[package]]
+name = "langchain-huggingface"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "huggingface-hub" },
+    { name = "langchain-core" },
+    { name = "tokenizers" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1d/0d/2a22659534cc70410573a53d32756cf7971de7923ba2ccf940c03ecbe12a/langchain_huggingface-1.0.0.tar.gz", hash = "sha256:0d2eb924ff77dc08bb7dd340ab10d47c1b71372e4297bc12e84c4a66df9a4414", size = 247750, upload-time = "2025-10-17T15:30:35.179Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ac/30/7476a926e31498ebc4be3131e06650c5136ffb8ba12067f56212e187dd7c/langchain_huggingface-1.0.0-py3-none-any.whl", hash = "sha256:06d6ac57951c6a1c47d329c38f2b32472a839eab2fa14883be784916ea075da0", size = 27493, upload-time = "2025-10-17T15:30:34.023Z" },
+]
+
[[package]]
name = "langchain-mistralai"
version = "0.2.12"
@@ -2677,7 +2659,7 @@ wheels = [

[[package]]
name = "langchain-openai"
-version = "1.0.0a4"
+version = "1.0.2"
source = { editable = "../partners/openai" }
dependencies = [
    { name = "langchain-core" },
@@ -2697,6 +2679,7 @@ dev = [{ name = "langchain-core", editable = "../core" }]
lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }]
test = [
    { name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
+    { name = "langchain", editable = "../langchain_v1" },
    { name = "langchain-core", editable = "../core" },
    { name = "langchain-tests", editable = "../standard-tests" },
    { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" },
@@ -2716,7 +2699,7 @@ test-integration = [
    { name = "httpx", specifier = ">=0.27.0,<1.0.0" },
    { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" },
    { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" },
-    { name = "pillow", specifier = ">=10.3.0,<11.0.0" },
+    { name = "pillow", specifier = ">=10.3.0,<12.0.0" },
]
typing = [
    { name = "langchain-core", editable = "../core" },
@@ -2739,7 +2722,7 @@ wheels = [

[[package]]
name = "langchain-tests"
-version = "1.0.0a2"
+version = "1.0.1"
source = { editable = "../standard-tests" }
dependencies = [
    { name = "httpx" },
@@ -2784,7 +2767,7 @@ typing = [

[[package]]
name = "langchain-text-splitters"
-version = "1.0.0a1"
+version = "1.0.0"
source = { editable = "../text-splitters" }
dependencies = [
    { name = "langchain-core" },
@@ -2817,8 +2800,8 @@ test-integration = [
    { name = "nltk", specifier = ">=3.9.1,<4.0.0" },
    { name = "scipy", marker = "python_full_version == '3.12.*'", specifier = ">=1.7.0,<2.0.0" },
    { name = "scipy", marker = "python_full_version >= '3.13'", specifier = ">=1.14.1,<2.0.0" },
-    { name = "sentence-transformers", specifier = ">=3.0.1,<4.0.0" },
-    { name = "spacy", specifier = ">=3.8.7,<4.0.0" },
+    { name = "sentence-transformers", marker = "python_full_version < '3.14'", specifier = ">=3.0.1,<4.0.0" },
+    { name = "spacy", marker = "python_full_version < '3.14'", specifier = ">=3.8.7,<4.0.0" },
    { name = "thinc", specifier = ">=8.3.6,<9.0.0" },
    { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
    { name = "transformers", specifier = ">=4.51.3,<5.0.0" },
@@ -3701,7 +3684,7 @@ wheels = [

[[package]]
name = "pandas"
-version = "2.3.2"
+version = "2.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
@@ -3710,42 +3693,55 @@ dependencies = [
    { name = "pytz" },
    { name = "tzdata" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/79/8e/0e90233ac205ad182bd6b422532695d2b9414944a280488105d598c70023/pandas-2.3.2.tar.gz", hash =
"sha256:ab7b58f8f82706890924ccdfb5f48002b83d2b5a3845976a9fb705d36c34dcdb", size = 4488684, upload-time = "2025-08-21T10:28:29.257Z" } +sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/16/a8eeb70aad84ccbf14076793f90e0031eded63c1899aeae9fdfbf37881f4/pandas-2.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52bc29a946304c360561974c6542d1dd628ddafa69134a7131fdfd6a5d7a1a35", size = 11539648, upload-time = "2025-08-21T10:26:36.236Z" }, - { url = "https://files.pythonhosted.org/packages/47/f1/c5bdaea13bf3708554d93e948b7ea74121ce6e0d59537ca4c4f77731072b/pandas-2.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:220cc5c35ffaa764dd5bb17cf42df283b5cb7fdf49e10a7b053a06c9cb48ee2b", size = 10786923, upload-time = "2025-08-21T10:26:40.518Z" }, - { url = "https://files.pythonhosted.org/packages/bb/10/811fa01476d29ffed692e735825516ad0e56d925961819e6126b4ba32147/pandas-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c05e15111221384019897df20c6fe893b2f697d03c811ee67ec9e0bb5a3424", size = 11726241, upload-time = "2025-08-21T10:26:43.175Z" }, - { url = "https://files.pythonhosted.org/packages/c4/6a/40b043b06e08df1ea1b6d20f0e0c2f2c4ec8c4f07d1c92948273d943a50b/pandas-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc03acc273c5515ab69f898df99d9d4f12c4d70dbfc24c3acc6203751d0804cf", size = 12349533, upload-time = "2025-08-21T10:26:46.611Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ea/2e081a2302e41a9bca7056659fdd2b85ef94923723e41665b42d65afd347/pandas-2.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d25c20a03e8870f6339bcf67281b946bd20b86f1a544ebbebb87e66a8d642cba", size = 13202407, upload-time = "2025-08-21T10:26:49.068Z" }, - { url = "https://files.pythonhosted.org/packages/f4/12/7ff9f6a79e2ee8869dcf70741ef998b97ea20050fe25f83dc759764c1e32/pandas-2.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21bb612d148bb5860b7eb2c10faacf1a810799245afd342cf297d7551513fbb6", size = 13837212, upload-time = "2025-08-21T10:26:51.832Z" }, - { url = "https://files.pythonhosted.org/packages/d8/df/5ab92fcd76455a632b3db34a746e1074d432c0cdbbd28d7cd1daba46a75d/pandas-2.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:b62d586eb25cb8cb70a5746a378fc3194cb7f11ea77170d59f889f5dfe3cec7a", size = 11338099, upload-time = "2025-08-21T10:26:54.382Z" }, - { url = "https://files.pythonhosted.org/packages/7a/59/f3e010879f118c2d400902d2d871c2226cef29b08c09fb8dc41111730400/pandas-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1333e9c299adcbb68ee89a9bb568fc3f20f9cbb419f1dd5225071e6cddb2a743", size = 11563308, upload-time = "2025-08-21T10:26:56.656Z" }, - { url = "https://files.pythonhosted.org/packages/38/18/48f10f1cc5c397af59571d638d211f494dba481f449c19adbd282aa8f4ca/pandas-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:76972bcbd7de8e91ad5f0ca884a9f2c477a2125354af624e022c49e5bd0dfff4", size = 10820319, upload-time = "2025-08-21T10:26:59.162Z" }, - { url = "https://files.pythonhosted.org/packages/95/3b/1e9b69632898b048e223834cd9702052bcf06b15e1ae716eda3196fb972e/pandas-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b98bdd7c456a05eef7cd21fd6b29e3ca243591fe531c62be94a2cc987efb5ac2", 
size = 11790097, upload-time = "2025-08-21T10:27:02.204Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ef/0e2ffb30b1f7fbc9a588bd01e3c14a0d96854d09a887e15e30cc19961227/pandas-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d81573b3f7db40d020983f78721e9bfc425f411e616ef019a10ebf597aedb2e", size = 12397958, upload-time = "2025-08-21T10:27:05.409Z" }, - { url = "https://files.pythonhosted.org/packages/23/82/e6b85f0d92e9afb0e7f705a51d1399b79c7380c19687bfbf3d2837743249/pandas-2.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e190b738675a73b581736cc8ec71ae113d6c3768d0bd18bffa5b9a0927b0b6ea", size = 13225600, upload-time = "2025-08-21T10:27:07.791Z" }, - { url = "https://files.pythonhosted.org/packages/e8/f1/f682015893d9ed51611948bd83683670842286a8edd4f68c2c1c3b231eef/pandas-2.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c253828cb08f47488d60f43c5fc95114c771bbfff085da54bfc79cb4f9e3a372", size = 13879433, upload-time = "2025-08-21T10:27:10.347Z" }, - { url = "https://files.pythonhosted.org/packages/a7/e7/ae86261695b6c8a36d6a4c8d5f9b9ede8248510d689a2f379a18354b37d7/pandas-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:9467697b8083f9667b212633ad6aa4ab32436dcbaf4cd57325debb0ddef2012f", size = 11336557, upload-time = "2025-08-21T10:27:12.983Z" }, - { url = "https://files.pythonhosted.org/packages/ec/db/614c20fb7a85a14828edd23f1c02db58a30abf3ce76f38806155d160313c/pandas-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fbb977f802156e7a3f829e9d1d5398f6192375a3e2d1a9ee0803e35fe70a2b9", size = 11587652, upload-time = "2025-08-21T10:27:15.888Z" }, - { url = "https://files.pythonhosted.org/packages/99/b0/756e52f6582cade5e746f19bad0517ff27ba9c73404607c0306585c201b3/pandas-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b9b52693123dd234b7c985c68b709b0b009f4521000d0525f2b95c22f15944b", size = 10717686, upload-time = "2025-08-21T10:27:18.486Z" }, - { url = "https://files.pythonhosted.org/packages/37/4c/dd5ccc1e357abfeee8353123282de17997f90ff67855f86154e5a13b81e5/pandas-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd281310d4f412733f319a5bc552f86d62cddc5f51d2e392c8787335c994175", size = 11278722, upload-time = "2025-08-21T10:27:21.149Z" }, - { url = "https://files.pythonhosted.org/packages/d3/a4/f7edcfa47e0a88cda0be8b068a5bae710bf264f867edfdf7b71584ace362/pandas-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96d31a6b4354e3b9b8a2c848af75d31da390657e3ac6f30c05c82068b9ed79b9", size = 11987803, upload-time = "2025-08-21T10:27:23.767Z" }, - { url = "https://files.pythonhosted.org/packages/f6/61/1bce4129f93ab66f1c68b7ed1c12bac6a70b1b56c5dab359c6bbcd480b52/pandas-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df4df0b9d02bb873a106971bb85d448378ef14b86ba96f035f50bbd3688456b4", size = 12766345, upload-time = "2025-08-21T10:27:26.6Z" }, - { url = "https://files.pythonhosted.org/packages/8e/46/80d53de70fee835531da3a1dae827a1e76e77a43ad22a8cd0f8142b61587/pandas-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:213a5adf93d020b74327cb2c1b842884dbdd37f895f42dcc2f09d451d949f811", size = 13439314, upload-time = "2025-08-21T10:27:29.213Z" }, - { url = "https://files.pythonhosted.org/packages/28/30/8114832daff7489f179971dbc1d854109b7f4365a546e3ea75b6516cea95/pandas-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c13b81a9347eb8c7548f53fd9a4f08d4dfe996836543f805c987bafa03317ae", size = 10983326, upload-time = "2025-08-21T10:27:31.901Z" }, - { url 
= "https://files.pythonhosted.org/packages/27/64/a2f7bf678af502e16b472527735d168b22b7824e45a4d7e96a4fbb634b59/pandas-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c6ecbac99a354a051ef21c5307601093cb9e0f4b1855984a084bfec9302699e", size = 11531061, upload-time = "2025-08-21T10:27:34.647Z" }, - { url = "https://files.pythonhosted.org/packages/54/4c/c3d21b2b7769ef2f4c2b9299fcadd601efa6729f1357a8dbce8dd949ed70/pandas-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6f048aa0fd080d6a06cc7e7537c09b53be6642d330ac6f54a600c3ace857ee9", size = 10668666, upload-time = "2025-08-21T10:27:37.203Z" }, - { url = "https://files.pythonhosted.org/packages/50/e2/f775ba76ecfb3424d7f5862620841cf0edb592e9abd2d2a5387d305fe7a8/pandas-2.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0064187b80a5be6f2f9c9d6bdde29372468751dfa89f4211a3c5871854cfbf7a", size = 11332835, upload-time = "2025-08-21T10:27:40.188Z" }, - { url = "https://files.pythonhosted.org/packages/8f/52/0634adaace9be2d8cac9ef78f05c47f3a675882e068438b9d7ec7ef0c13f/pandas-2.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac8c320bded4718b298281339c1a50fb00a6ba78cb2a63521c39bec95b0209b", size = 12057211, upload-time = "2025-08-21T10:27:43.117Z" }, - { url = "https://files.pythonhosted.org/packages/0b/9d/2df913f14b2deb9c748975fdb2491da1a78773debb25abbc7cbc67c6b549/pandas-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:114c2fe4f4328cf98ce5716d1532f3ab79c5919f95a9cfee81d9140064a2e4d6", size = 12749277, upload-time = "2025-08-21T10:27:45.474Z" }, - { url = "https://files.pythonhosted.org/packages/87/af/da1a2417026bd14d98c236dba88e39837182459d29dcfcea510b2ac9e8a1/pandas-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:48fa91c4dfb3b2b9bfdb5c24cd3567575f4e13f9636810462ffed8925352be5a", size = 13415256, upload-time = "2025-08-21T10:27:49.885Z" }, - { url = "https://files.pythonhosted.org/packages/22/3c/f2af1ce8840ef648584a6156489636b5692c162771918aa95707c165ad2b/pandas-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:12d039facec710f7ba305786837d0225a3444af7bbd9c15c32ca2d40d157ed8b", size = 10982579, upload-time = "2025-08-21T10:28:08.435Z" }, - { url = "https://files.pythonhosted.org/packages/f3/98/8df69c4097a6719e357dc249bf437b8efbde808038268e584421696cbddf/pandas-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c624b615ce97864eb588779ed4046186f967374185c047070545253a52ab2d57", size = 12028163, upload-time = "2025-08-21T10:27:52.232Z" }, - { url = "https://files.pythonhosted.org/packages/0e/23/f95cbcbea319f349e10ff90db488b905c6883f03cbabd34f6b03cbc3c044/pandas-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0cee69d583b9b128823d9514171cabb6861e09409af805b54459bd0c821a35c2", size = 11391860, upload-time = "2025-08-21T10:27:54.673Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1b/6a984e98c4abee22058aa75bfb8eb90dce58cf8d7296f8bc56c14bc330b0/pandas-2.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2319656ed81124982900b4c37f0e0c58c015af9a7bbc62342ba5ad07ace82ba9", size = 11309830, upload-time = "2025-08-21T10:27:56.957Z" }, - { url = "https://files.pythonhosted.org/packages/15/d5/f0486090eb18dd8710bf60afeaf638ba6817047c0c8ae5c6a25598665609/pandas-2.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b37205ad6f00d52f16b6d09f406434ba928c1a1966e2771006a9033c736d30d2", size = 11883216, upload-time = "2025-08-21T10:27:59.302Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/86/692050c119696da19e20245bbd650d8dfca6ceb577da027c3a73c62a047e/pandas-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:837248b4fc3a9b83b9c6214699a13f069dc13510a6a6d7f9ba33145d2841a012", size = 12699743, upload-time = "2025-08-21T10:28:02.447Z" }, - { url = "https://files.pythonhosted.org/packages/cd/d7/612123674d7b17cf345aad0a10289b2a384bff404e0463a83c4a3a59d205/pandas-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d2c3554bd31b731cd6490d94a28f3abb8dd770634a9e06eb6d2911b9827db370", size = 13186141, upload-time = "2025-08-21T10:28:05.377Z" }, + { url = "https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c", size = 11555763, upload-time = "2025-09-29T23:16:53.287Z" }, + { url = "https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a", size = 10801217, upload-time = "2025-09-29T23:17:04.522Z" }, + { url = "https://files.pythonhosted.org/packages/1d/03/3fc4a529a7710f890a239cc496fc6d50ad4a0995657dccc1d64695adb9f4/pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1", size = 12148791, upload-time = "2025-09-29T23:17:18.444Z" }, + { url = "https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838", size = 12769373, upload-time = "2025-09-29T23:17:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/df/91/82cc5169b6b25440a7fc0ef3a694582418d875c8e3ebf796a6d6470aa578/pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250", size = 13200444, upload-time = "2025-09-29T23:17:49.341Z" }, + { url = "https://files.pythonhosted.org/packages/10/ae/89b3283800ab58f7af2952704078555fa60c807fff764395bb57ea0b0dbd/pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4", size = 13858459, upload-time = "2025-09-29T23:18:03.722Z" }, + { url = "https://files.pythonhosted.org/packages/85/72/530900610650f54a35a19476eca5104f38555afccda1aa11a92ee14cb21d/pandas-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826", size = 11346086, upload-time = "2025-09-29T23:18:18.505Z" }, + { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" }, + { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" }, + { url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" }, + { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" }, + { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, + { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" }, + { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" }, + { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" }, + { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" }, + { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" }, + { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, + { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, + { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" }, + { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" }, + { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" }, + { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" }, + { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" }, + { url = "https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" }, + { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" }, + { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" }, + { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" }, ] [[package]] @@ -4180,7 +4176,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -4188,96 +4184,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = 
"2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = 
"sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - 
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = 
"sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = 
"2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = 
"2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = "https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = 
"sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = "https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size 
= 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = "sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = "2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] @@ -5320,38 +5343,63 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.11.0" +version = "0.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = "sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, - { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, - { url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, - { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, - { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, - { url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, - { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, - { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, - { url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, - { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, - { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, - { url = 
"https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, - { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, - { url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, + { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, + { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, ] [[package]] @@ -5643,14 +5691,14 @@ wheels = [ [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = 
"sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] diff --git a/libs/langchain_v1/Makefile b/libs/langchain_v1/Makefile index 7df0cec386f..032ffeeee64 100644 --- a/libs/langchain_v1/Makefile +++ b/libs/langchain_v1/Makefile @@ -28,7 +28,7 @@ coverage: $(TEST_FILE) test: - make start_services && LANGGRAPH_TEST_FAST=0 uv run --group test pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE) --cov-report term-missing:skip-covered; \ + make start_services && LANGGRAPH_TEST_FAST=0 uv run --no-sync --active --group test pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE) --cov-report term-missing:skip-covered; \ EXIT_CODE=$$?; \ make stop_services; \ exit $$EXIT_CODE diff --git a/libs/langchain_v1/README.md b/libs/langchain_v1/README.md index 1960f29e79b..49db352a0f0 100644 --- a/libs/langchain_v1/README.md +++ b/libs/langchain_v1/README.md @@ -1,8 +1,7 @@ # πŸ¦œοΈπŸ”— LangChain -⚑ Building applications with LLMs through composability ⚑ - -[![PyPI - License](https://img.shields.io/pypi/l/langchain?style=flat-square)](https://opensource.org/licenses/MIT) +[![PyPI - Version](https://img.shields.io/pypi/v/langchain?label=%20)](https://pypi.org/project/langchain/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain)](https://opensource.org/licenses/MIT) [![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain)](https://pypistats.org/packages/langchain) [![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) @@ -13,67 +12,28 @@ To help you ship LangChain apps to production faster, check out [LangSmith](http ## Quick Install -`pip install langchain` +```bash +pip install langchain +``` ## πŸ€” What is this? -Large language models (LLMs) are emerging as a transformative technology, enabling developers to build applications that they previously could not. However, using these LLMs in isolation is often insufficient for creating a truly powerful app - the real power comes when you can combine them with other sources of computation or knowledge. +LangChain is the easiest way to start building agents and applications powered by LLMs. With under 10 lines of code, you can connect to OpenAI, Anthropic, Google, and [more](https://docs.langchain.com/oss/python/integrations/providers/overview). LangChain provides a pre-built agent architecture and model integrations to help you get started quickly and seamlessly incorporate LLMs into your agents and applications. -This library aims to assist in the development of those types of applications. Common examples of these applications include: +We recommend you use LangChain if you want to quickly build agents and autonomous applications. 
Use [LangGraph](https://docs.langchain.com/oss/python/langgraph/overview), our low-level agent orchestration framework and runtime, when you have more advanced needs that require a combination of deterministic and agentic workflows, heavy customization, and carefully controlled latency. -**❓ Question answering with RAG** - -- [Documentation](https://python.langchain.com/docs/tutorials/rag/) -- End-to-end Example: [Chat LangChain](https://chat.langchain.com) and [repo](https://github.com/langchain-ai/chat-langchain) - -**🧱 Extracting structured output** - -- [Documentation](https://python.langchain.com/docs/tutorials/extraction/) -- End-to-end Example: [SQL Llama2 Template](https://github.com/langchain-ai/langchain-extract/) - -**πŸ€– Chatbots** - -- [Documentation](https://python.langchain.com/docs/tutorials/chatbot/) -- End-to-end Example: [Web LangChain (web researcher chatbot)](https://weblangchain.vercel.app) and [repo](https://github.com/langchain-ai/weblangchain) +LangChain [agents](https://docs.langchain.com/oss/python/langchain/agents) are built on top of LangGraph in order to provide durable execution, streaming, human-in-the-loop, persistence, and more. (You do not need to know LangGraph for basic LangChain agent usage.) ## πŸ“– Documentation -Please see [our full documentation](https://python.langchain.com) on: +For full documentation, see the [API reference](https://reference.langchain.com/python/langchain/langchain/). -- Getting started (installation, setting up the environment, simple examples) -- How-To examples (demos, integrations, helper functions) -- Reference (full API docs) -- Resources (high-level explanation of core concepts) +## πŸ“• Releases & Versioning -## πŸš€ What can this help with? - -There are five main areas that LangChain is designed to help with. -These are, in increasing order of complexity: - -**πŸ€– Agents:** - -Agents involve an LLM making decisions about which Actions to take, taking that Action, seeing an Observation, and repeating that until done. LangChain provides a standard interface for agents, a selection of agents to choose from, and examples of end-to-end agents. - -**πŸ“š Retrieval Augmented Generation:** - -Retrieval Augmented Generation involves specific types of chains that first interact with an external data source to fetch data for use in the generation step. Examples include summarization of long pieces of text and question/answering over specific data sources. - -**🧐 Evaluation:** - -Generative models are notoriously hard to evaluate with traditional metrics. One new way of evaluating them is using language models themselves to do the evaluation. LangChain provides some prompts/chains for assisting in this. - -**πŸ“ƒ Models and Prompts:** - -This includes prompt management, prompt optimization, a generic interface for all LLMs, and common utilities for working with chat models and LLMs. - -**πŸ”— Chains:** - -Chains go beyond a single LLM call and involve sequences of calls (whether to an LLM or a different utility). LangChain provides a standard interface for chains, lots of integrations with other tools, and end-to-end chains for common applications. - -For more information on these concepts, please see our [full documentation](https://python.langchain.com). +See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning](https://docs.langchain.com/oss/python/versioning) policies. 
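The quick-start flow the rewritten README describes boils down to a single `create_agent` call. A minimal sketch is shown below, modeled on the `create_agent` docstring example that appears later in this diff; the `check_weather` tool, the model string, and the invocation shape are illustrative assumptions, and actually running it requires the relevant provider package and API key.

```python
from langchain.agents import create_agent


def check_weather(location: str) -> str:
    """Return the weather forecast for the specified location."""
    # Placeholder tool body for illustration only.
    return f"It's always sunny in {location}"


# Any supported "provider:model" string or chat model instance can be passed as `model`.
agent = create_agent(
    model="anthropic:claude-sonnet-4-5-20250929",
    tools=[check_weather],
    system_prompt="You are a helpful assistant",
)

# The compiled graph takes a messages list and returns the full message history.
result = agent.invoke({"messages": [{"role": "user", "content": "What's the weather in SF?"}]})
print(result["messages"][-1].content)
```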
## πŸ’ Contributing As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. -For detailed information on how to contribute, see the [Contributing Guide](https://python.langchain.com/docs/contributing/). +For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview). diff --git a/libs/langchain_v1/langchain/__init__.py b/libs/langchain_v1/langchain/__init__.py index 7be32be5ba2..eaea90f7695 100644 --- a/libs/langchain_v1/langchain/__init__.py +++ b/libs/langchain_v1/langchain/__init__.py @@ -1,3 +1,3 @@ """Main entrypoint into LangChain.""" -__version__ = "1.0.0a12" +__version__ = "1.0.4" diff --git a/libs/langchain_v1/langchain/agents/__init__.py b/libs/langchain_v1/langchain/agents/__init__.py index 72abb7ed627..67ac01b1ac5 100644 --- a/libs/langchain_v1/langchain/agents/__init__.py +++ b/libs/langchain_v1/langchain/agents/__init__.py @@ -1,4 +1,10 @@ -"""langgraph.prebuilt exposes a higher-level API for creating and executing agents and tools.""" +"""Entrypoint to building [Agents](https://docs.langchain.com/oss/python/langchain/agents) with LangChain. + +!!! warning "Reference docs" + This page contains **reference documentation** for Agents. See + [the docs](https://docs.langchain.com/oss/python/langchain/agents) for conceptual + guides, tutorials, and examples on using Agents. +""" # noqa: E501 from langchain.agents.factory import create_agent from langchain.agents.middleware.types import AgentState diff --git a/libs/langchain_v1/langchain/agents/factory.py b/libs/langchain_v1/langchain/agents/factory.py index f61d11d067d..c8880b65cd7 100644 --- a/libs/langchain_v1/langchain/agents/factory.py +++ b/libs/langchain_v1/langchain/agents/factory.py @@ -3,7 +3,6 @@ from __future__ import annotations import itertools -from dataclasses import dataclass from typing import ( TYPE_CHECKING, Annotated, @@ -14,27 +13,29 @@ from typing import ( get_type_hints, ) -if TYPE_CHECKING: - from collections.abc import Awaitable - from langchain_core.language_models.chat_models import BaseChatModel from langchain_core.messages import AIMessage, AnyMessage, SystemMessage, ToolMessage from langchain_core.tools import BaseTool from langgraph._internal._runnable import RunnableCallable from langgraph.constants import END, START from langgraph.graph.state import StateGraph +from langgraph.prebuilt.tool_node import ToolCallWithContext, ToolNode from langgraph.runtime import Runtime # noqa: TC002 from langgraph.types import Command, Send from langgraph.typing import ContextT # noqa: TC002 -from typing_extensions import NotRequired, Required, TypedDict, TypeVar +from typing_extensions import NotRequired, Required, TypedDict from langchain.agents.middleware.types import ( AgentMiddleware, AgentState, JumpTo, ModelRequest, + ModelResponse, OmitFromSchema, - PublicAgentState, + ResponseT, + StateT_co, + _InputAgentState, + _OutputAgentState, ) from langchain.agents.structured_output import ( AutoStrategy, @@ -43,15 +44,14 @@ from langchain.agents.structured_output import ( ProviderStrategy, ProviderStrategyBinding, ResponseFormat, + StructuredOutputError, StructuredOutputValidationError, ToolStrategy, ) from langchain.chat_models import init_chat_model -from langchain.tools import ToolNode -from langchain.tools.tool_node import ToolCallWithContext if TYPE_CHECKING: - from collections.abc import Callable, 
Sequence + from collections.abc import Awaitable, Callable, Sequence from langchain_core.runnables import Runnable from langgraph.cache.base import BaseCache @@ -59,42 +59,29 @@ if TYPE_CHECKING: from langgraph.store.base import BaseStore from langgraph.types import Checkpointer - from langchain.tools.tool_node import ToolCallHandler, ToolCallRequest + from langchain.agents.middleware.types import ToolCallRequest, ToolCallWrapper STRUCTURED_OUTPUT_ERROR_TEMPLATE = "Error: {error}\n Please fix your mistakes." -ResponseT = TypeVar("ResponseT") - -@dataclass -class _InternalModelResponse: - """Internal wrapper for model execution results. - - Contains either a successful result or an exception, plus cached metadata. - Middleware receives either AIMessage via .send() or Exception via .throw(). - """ - - result: AIMessage | None - """The AI message result on success.""" - - exception: Exception | None - """The exception on error.""" - - effective_response_format: Any = None - """Cached response format after auto-detection.""" +def _normalize_to_model_response(result: ModelResponse | AIMessage) -> ModelResponse: + """Normalize middleware return value to ModelResponse.""" + if isinstance(result, AIMessage): + return ModelResponse(result=[result], structured_response=None) + return result def _chain_model_call_handlers( handlers: Sequence[ Callable[ - [ModelRequest, Callable[[ModelRequest], AIMessage]], - AIMessage, + [ModelRequest, Callable[[ModelRequest], ModelResponse]], + ModelResponse | AIMessage, ] ], ) -> ( Callable[ - [ModelRequest, Callable[[ModelRequest], AIMessage]], - AIMessage, + [ModelRequest, Callable[[ModelRequest], ModelResponse]], + ModelResponse, ] | None ): @@ -107,7 +94,7 @@ def _chain_model_call_handlers( handlers: List of handlers. First handler wraps all others. Returns: - Composed handler, or None if handlers empty. + Composed handler, or `None` if handlers empty. 
Example: ```python @@ -137,33 +124,45 @@ def _chain_model_call_handlers( return None if len(handlers) == 1: - return handlers[0] + # Single handler - wrap to normalize output + single_handler = handlers[0] + + def normalized_single( + request: ModelRequest, + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelResponse: + result = single_handler(request, handler) + return _normalize_to_model_response(result) + + return normalized_single def compose_two( outer: Callable[ - [ModelRequest, Callable[[ModelRequest], AIMessage]], - AIMessage, + [ModelRequest, Callable[[ModelRequest], ModelResponse]], + ModelResponse | AIMessage, ], inner: Callable[ - [ModelRequest, Callable[[ModelRequest], AIMessage]], - AIMessage, + [ModelRequest, Callable[[ModelRequest], ModelResponse]], + ModelResponse | AIMessage, ], ) -> Callable[ - [ModelRequest, Callable[[ModelRequest], AIMessage]], - AIMessage, + [ModelRequest, Callable[[ModelRequest], ModelResponse]], + ModelResponse, ]: """Compose two handlers where outer wraps inner.""" def composed( request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: - # Create a wrapper that calls inner with the base handler - def inner_handler(req: ModelRequest) -> AIMessage: - return inner(req, handler) + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelResponse: + # Create a wrapper that calls inner with the base handler and normalizes + def inner_handler(req: ModelRequest) -> ModelResponse: + inner_result = inner(req, handler) + return _normalize_to_model_response(inner_result) - # Call outer with the wrapped inner as its handler - return outer(request, inner_handler) + # Call outer with the wrapped inner as its handler and normalize + outer_result = outer(request, inner_handler) + return _normalize_to_model_response(outer_result) return composed @@ -172,62 +171,83 @@ def _chain_model_call_handlers( for handler in reversed(handlers[:-1]): result = compose_two(handler, result) - return result + # Wrap to ensure final return type is exactly ModelResponse + def final_normalized( + request: ModelRequest, + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelResponse: + # result here is typed as returning ModelResponse | AIMessage but compose_two normalizes + final_result = result(request, handler) + return _normalize_to_model_response(final_result) + + return final_normalized def _chain_async_model_call_handlers( handlers: Sequence[ Callable[ - [ModelRequest, Callable[[ModelRequest], Awaitable[AIMessage]]], - Awaitable[AIMessage], + [ModelRequest, Callable[[ModelRequest], Awaitable[ModelResponse]]], + Awaitable[ModelResponse | AIMessage], ] ], ) -> ( Callable[ - [ModelRequest, Callable[[ModelRequest], Awaitable[AIMessage]]], - Awaitable[AIMessage], + [ModelRequest, Callable[[ModelRequest], Awaitable[ModelResponse]]], + Awaitable[ModelResponse], ] | None ): - """Compose multiple async wrap_model_call handlers into single middleware stack. + """Compose multiple async `wrap_model_call` handlers into single middleware stack. Args: handlers: List of async handlers. First handler wraps all others. Returns: - Composed async handler, or None if handlers empty. + Composed async handler, or `None` if handlers empty. 
""" if not handlers: return None if len(handlers) == 1: - return handlers[0] + # Single handler - wrap to normalize output + single_handler = handlers[0] + + async def normalized_single( + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelResponse: + result = await single_handler(request, handler) + return _normalize_to_model_response(result) + + return normalized_single def compose_two( outer: Callable[ - [ModelRequest, Callable[[ModelRequest], Awaitable[AIMessage]]], - Awaitable[AIMessage], + [ModelRequest, Callable[[ModelRequest], Awaitable[ModelResponse]]], + Awaitable[ModelResponse | AIMessage], ], inner: Callable[ - [ModelRequest, Callable[[ModelRequest], Awaitable[AIMessage]]], - Awaitable[AIMessage], + [ModelRequest, Callable[[ModelRequest], Awaitable[ModelResponse]]], + Awaitable[ModelResponse | AIMessage], ], ) -> Callable[ - [ModelRequest, Callable[[ModelRequest], Awaitable[AIMessage]]], - Awaitable[AIMessage], + [ModelRequest, Callable[[ModelRequest], Awaitable[ModelResponse]]], + Awaitable[ModelResponse], ]: """Compose two async handlers where outer wraps inner.""" async def composed( request: ModelRequest, - handler: Callable[[ModelRequest], Awaitable[AIMessage]], - ) -> AIMessage: - # Create a wrapper that calls inner with the base handler - async def inner_handler(req: ModelRequest) -> AIMessage: - return await inner(req, handler) + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelResponse: + # Create a wrapper that calls inner with the base handler and normalizes + async def inner_handler(req: ModelRequest) -> ModelResponse: + inner_result = await inner(req, handler) + return _normalize_to_model_response(inner_result) - # Call outer with the wrapped inner as its handler - return await outer(request, inner_handler) + # Call outer with the wrapped inner as its handler and normalize + outer_result = await outer(request, inner_handler) + return _normalize_to_model_response(outer_result) return composed @@ -236,16 +256,26 @@ def _chain_async_model_call_handlers( for handler in reversed(handlers[:-1]): result = compose_two(handler, result) - return result + # Wrap to ensure final return type is exactly ModelResponse + async def final_normalized( + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelResponse: + # result here is typed as returning ModelResponse | AIMessage but compose_two normalizes + final_result = await result(request, handler) + return _normalize_to_model_response(final_result) + + return final_normalized def _resolve_schema(schemas: set[type], schema_name: str, omit_flag: str | None = None) -> type: - """Resolve schema by merging schemas and optionally respecting OmitFromSchema annotations. + """Resolve schema by merging schemas and optionally respecting `OmitFromSchema` annotations. Args: schemas: List of schema types to merge - schema_name: Name for the generated TypedDict - omit_flag: If specified, omit fields with this flag set ('input' or 'output') + schema_name: Name for the generated `TypedDict` + omit_flag: If specified, omit fields with this flag set (`'input'` or + `'output'`) """ all_annotations = {} @@ -285,11 +315,11 @@ def _extract_metadata(type_: type) -> list: def _get_can_jump_to(middleware: AgentMiddleware[Any, Any], hook_name: str) -> list[JumpTo]: - """Get the can_jump_to list from either sync or async hook methods. + """Get the `can_jump_to` list from either sync or async hook methods. 
Args: middleware: The middleware instance to inspect. - hook_name: The name of the hook ('before_model' or 'after_model'). + hook_name: The name of the hook (`'before_model'` or `'after_model'`). Returns: List of jump destinations, or empty list if not configured. @@ -323,7 +353,7 @@ def _supports_provider_strategy(model: str | BaseChatModel) -> bool: """Check if a model supports provider-specific structured output. Args: - model: Model name string or BaseChatModel instance. + model: Model name string or `BaseChatModel` instance. Returns: `True` if the model supports provider-specific structured output, `False` otherwise. @@ -346,7 +376,7 @@ def _handle_structured_output_error( exception: Exception, response_format: ResponseFormat, ) -> tuple[bool, str]: - """Handle structured output error. Returns (should_retry, retry_tool_message).""" + """Handle structured output error. Returns `(should_retry, retry_tool_message)`.""" if not isinstance(response_format, ToolStrategy): return False, "" @@ -372,30 +402,30 @@ def _handle_structured_output_error( return False, "" -def _chain_tool_call_handlers( - handlers: Sequence[ToolCallHandler], -) -> ToolCallHandler | None: - """Compose handlers into middleware stack (first = outermost). +def _chain_tool_call_wrappers( + wrappers: Sequence[ToolCallWrapper], +) -> ToolCallWrapper | None: + """Compose wrappers into middleware stack (first = outermost). Args: - handlers: Handlers in middleware order. + wrappers: Wrappers in middleware order. Returns: - Composed handler, or None if empty. + Composed wrapper, or `None` if empty. Example: - handler = _chain_tool_call_handlers([auth, cache, retry]) + wrapper = _chain_tool_call_wrappers([auth, cache, retry]) # Request flows: auth -> cache -> retry -> tool # Response flows: tool -> retry -> cache -> auth """ - if not handlers: + if not wrappers: return None - if len(handlers) == 1: - return handlers[0] + if len(wrappers) == 1: + return wrappers[0] - def compose_two(outer: ToolCallHandler, inner: ToolCallHandler) -> ToolCallHandler: - """Compose two handlers where outer wraps inner.""" + def compose_two(outer: ToolCallWrapper, inner: ToolCallWrapper) -> ToolCallWrapper: + """Compose two wrappers where outer wraps inner.""" def composed( request: ToolCallRequest, @@ -410,10 +440,74 @@ def _chain_tool_call_handlers( return composed - # Chain all handlers: first -> second -> ... -> last - result = handlers[-1] - for handler in reversed(handlers[:-1]): - result = compose_two(handler, result) + # Chain all wrappers: first -> second -> ... -> last + result = wrappers[-1] + for wrapper in reversed(wrappers[:-1]): + result = compose_two(wrapper, result) + + return result + + +def _chain_async_tool_call_wrappers( + wrappers: Sequence[ + Callable[ + [ToolCallRequest, Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]]], + Awaitable[ToolMessage | Command], + ] + ], +) -> ( + Callable[ + [ToolCallRequest, Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]]], + Awaitable[ToolMessage | Command], + ] + | None +): + """Compose async wrappers into middleware stack (first = outermost). + + Args: + wrappers: Async wrappers in middleware order. + + Returns: + Composed async wrapper, or `None` if empty. 
+ """ + if not wrappers: + return None + + if len(wrappers) == 1: + return wrappers[0] + + def compose_two( + outer: Callable[ + [ToolCallRequest, Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]]], + Awaitable[ToolMessage | Command], + ], + inner: Callable[ + [ToolCallRequest, Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]]], + Awaitable[ToolMessage | Command], + ], + ) -> Callable[ + [ToolCallRequest, Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]]], + Awaitable[ToolMessage | Command], + ]: + """Compose two async wrappers where outer wraps inner.""" + + async def composed( + request: ToolCallRequest, + execute: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + # Create an async callable that invokes inner with the original execute + async def call_inner(req: ToolCallRequest) -> ToolMessage | Command: + return await inner(req, execute) + + # Outer can call call_inner multiple times + return await outer(request, call_inner) + + return composed + + # Chain all wrappers: first -> second -> ... -> last + result = wrappers[-1] + for wrapper in reversed(wrappers[:-1]): + result = compose_two(wrapper, result) return result @@ -422,9 +516,10 @@ def create_agent( # noqa: PLR0915 model: str | BaseChatModel, tools: Sequence[BaseTool | Callable | dict[str, Any]] | None = None, *, - system_prompt: str | None = None, - middleware: Sequence[AgentMiddleware[AgentState[ResponseT], ContextT]] = (), + system_prompt: str | SystemMessage | None = None, + middleware: Sequence[AgentMiddleware[StateT_co, ContextT]] = (), response_format: ResponseFormat[ResponseT] | type[ResponseT] | None = None, + state_schema: type[AgentState[ResponseT]] | None = None, context_schema: type[ContextT] | None = None, checkpointer: Checkpointer | None = None, store: BaseStore | None = None, @@ -434,56 +529,87 @@ def create_agent( # noqa: PLR0915 name: str | None = None, cache: BaseCache | None = None, ) -> CompiledStateGraph[ - AgentState[ResponseT], ContextT, PublicAgentState[ResponseT], PublicAgentState[ResponseT] + AgentState[ResponseT], ContextT, _InputAgentState, _OutputAgentState[ResponseT] ]: """Creates an agent graph that calls tools in a loop until a stopping condition is met. For more details on using `create_agent`, - visit [Agents](https://docs.langchain.com/oss/python/langchain/agents) documentation. + visit the [Agents](https://docs.langchain.com/oss/python/langchain/agents) docs. Args: model: The language model for the agent. Can be a string identifier - (e.g., `"openai:gpt-4"`), a chat model instance (e.g., `ChatOpenAI()`). - tools: A list of tools, dicts, or callables. If `None` or an empty list, - the agent will consist of a model node without a tool calling loop. - system_prompt: An optional system prompt for the LLM. If provided as a string, - it will be converted to a SystemMessage and added to the beginning - of the message list. + (e.g., `"openai:gpt-4"`) or a direct chat model instance (e.g., + [`ChatOpenAI`][langchain_openai.ChatOpenAI] or other another + [chat model](https://docs.langchain.com/oss/python/integrations/chat)). + + For a full list of supported model strings, see + [`init_chat_model`][langchain.chat_models.init_chat_model(model_provider)]. + tools: A list of tools, `dicts`, or `Callable`. + + If `None` or an empty list, the agent will consist of a model node without a + tool calling loop. 
+ system_prompt: An optional system prompt for the LLM, provided either as a string + or as a [`SystemMessage`][langchain.messages.SystemMessage] object. + middleware: A sequence of middleware instances to apply to the agent. - Middleware can intercept and modify agent behavior at various stages. + + Middleware can intercept and modify agent behavior at various stages. See + the [full guide](https://docs.langchain.com/oss/python/langchain/middleware). response_format: An optional configuration for structured responses. - Can be a ToolStrategy, ProviderStrategy, or a Pydantic model class. + + Can be a `ToolStrategy`, `ProviderStrategy`, or a Pydantic model class. + If provided, the agent will handle structured output during the conversation flow. Raw schemas will be wrapped in an appropriate strategy based on model capabilities. + state_schema: An optional `TypedDict` schema that extends `AgentState`. + + When provided, this schema is used instead of `AgentState` as the base + schema for merging with middleware state schemas. This allows users to + add custom state fields without needing to create custom middleware. + Generally, it's recommended to use `state_schema` extensions via middleware + to keep relevant extensions scoped to corresponding hooks / tools. + + The schema must be a subclass of `AgentState[ResponseT]`. context_schema: An optional schema for runtime context. - checkpointer: An optional checkpoint saver object. This is used for persisting - the state of the graph (e.g., as chat memory) for a single thread - (e.g., a single conversation). - store: An optional store object. This is used for persisting data - across multiple threads (e.g., multiple conversations / users). + checkpointer: An optional checkpoint saver object. + + Used for persisting the state of the graph (e.g., as chat memory) for a + single thread (e.g., a single conversation). + store: An optional store object. + + Used for persisting data across multiple threads (e.g., multiple + conversations / users). interrupt_before: An optional list of node names to interrupt before. - This is useful if you want to add a user confirmation or other interrupt + + Useful if you want to add a user confirmation or other interrupt before taking an action. interrupt_after: An optional list of node names to interrupt after. - This is useful if you want to return directly or run additional processing + + Useful if you want to return directly or run additional processing on an output. - debug: A flag indicating whether to enable debug mode. - name: An optional name for the CompiledStateGraph. + debug: Whether to enable verbose logging for graph execution. + + When enabled, prints detailed information about each node execution, state + updates, and transitions during agent runtime. Useful for debugging + middleware behavior and understanding agent execution flow. + name: An optional name for the `CompiledStateGraph`. + This name will be automatically used when adding the agent graph to another graph as a subgraph node - particularly useful for building multi-agent systems. - cache: An optional BaseCache instance to enable caching of graph execution. + cache: An optional `BaseCache` instance to enable caching of graph execution. Returns: - A compiled StateGraph that can be used for chat interactions. + A compiled `StateGraph` that can be used for chat interactions. The agent node calls the language model with the messages list (after applying
If the resulting AIMessage contains `tool_calls`, the graph will - then call the tools. The tools node executes the tools and adds the responses - to the messages list as `ToolMessage` objects. The agent node then calls the - language model again. The process repeats until no more `tool_calls` are - present in the response. The agent then returns the full list of messages. + the system prompt). If the resulting [`AIMessage`][langchain.messages.AIMessage] + contains `tool_calls`, the graph will then call the tools. The tools node executes + the tools and adds the responses to the messages list as + [`ToolMessage`][langchain.messages.ToolMessage] objects. The agent node then calls + the language model again. The process repeats until no more `tool_calls` are present + in the response. The agent then returns the full list of messages. Example: ```python @@ -496,7 +622,7 @@ def create_agent( # noqa: PLR0915 graph = create_agent( - model="anthropic:claude-3-7-sonnet-latest", + model="anthropic:claude-sonnet-4-5-20250929", tools=[check_weather], system_prompt="You are a helpful assistant", ) @@ -545,16 +671,37 @@ def create_agent( # noqa: PLR0915 structured_output_tools[structured_tool_info.tool.name] = structured_tool_info middleware_tools = [t for m in middleware for t in getattr(m, "tools", [])] - # Collect middleware with wrap_tool_call hooks + # Collect middleware with wrap_tool_call or awrap_tool_call hooks + # Include middleware with either implementation to ensure NotImplementedError is raised + # when middleware doesn't support the execution path middleware_w_wrap_tool_call = [ - m for m in middleware if m.__class__.wrap_tool_call is not AgentMiddleware.wrap_tool_call + m + for m in middleware + if m.__class__.wrap_tool_call is not AgentMiddleware.wrap_tool_call + or m.__class__.awrap_tool_call is not AgentMiddleware.awrap_tool_call ] # Chain all wrap_tool_call handlers into a single composed handler - wrap_tool_call_handler = None + wrap_tool_call_wrapper = None if middleware_w_wrap_tool_call: - handlers = [m.wrap_tool_call for m in middleware_w_wrap_tool_call] - wrap_tool_call_handler = _chain_tool_call_handlers(handlers) + wrappers = [m.wrap_tool_call for m in middleware_w_wrap_tool_call] + wrap_tool_call_wrapper = _chain_tool_call_wrappers(wrappers) + + # Collect middleware with awrap_tool_call or wrap_tool_call hooks + # Include middleware with either implementation to ensure NotImplementedError is raised + # when middleware doesn't support the execution path + middleware_w_awrap_tool_call = [ + m + for m in middleware + if m.__class__.awrap_tool_call is not AgentMiddleware.awrap_tool_call + or m.__class__.wrap_tool_call is not AgentMiddleware.wrap_tool_call + ] + + # Chain all awrap_tool_call handlers into a single composed async handler + awrap_tool_call_wrapper = None + if middleware_w_awrap_tool_call: + async_wrappers = [m.awrap_tool_call for m in middleware_w_awrap_tool_call] + awrap_tool_call_wrapper = _chain_async_tool_call_wrappers(async_wrappers) # Setup tools tool_node: ToolNode | None = None @@ -567,7 +714,11 @@ def create_agent( # noqa: PLR0915 # Only create ToolNode if we have client-side tools tool_node = ( - ToolNode(tools=available_tools, on_tool_call=wrap_tool_call_handler) + ToolNode( + tools=available_tools, + wrap_tool_call=wrap_tool_call_wrapper, + awrap_tool_call=awrap_tool_call_wrapper, + ) if available_tools else None ) @@ -609,13 +760,23 @@ def create_agent( # noqa: PLR0915 if m.__class__.after_agent is not AgentMiddleware.after_agent or 
m.__class__.aafter_agent is not AgentMiddleware.aafter_agent ] + # Collect middleware with wrap_model_call or awrap_model_call hooks + # Include middleware with either implementation to ensure NotImplementedError is raised + # when middleware doesn't support the execution path middleware_w_wrap_model_call = [ - m for m in middleware if m.__class__.wrap_model_call is not AgentMiddleware.wrap_model_call + m + for m in middleware + if m.__class__.wrap_model_call is not AgentMiddleware.wrap_model_call + or m.__class__.awrap_model_call is not AgentMiddleware.awrap_model_call ] + # Collect middleware with awrap_model_call or wrap_model_call hooks + # Include middleware with either implementation to ensure NotImplementedError is raised + # when middleware doesn't support the execution path middleware_w_awrap_model_call = [ m for m in middleware if m.__class__.awrap_model_call is not AgentMiddleware.awrap_model_call + or m.__class__.wrap_model_call is not AgentMiddleware.wrap_model_call ] # Compose wrap_model_call handlers into a single middleware stack (sync) @@ -630,18 +791,20 @@ def create_agent( # noqa: PLR0915 async_handlers = [m.awrap_model_call for m in middleware_w_awrap_model_call] awrap_model_call_handler = _chain_async_model_call_handlers(async_handlers) - state_schemas = {m.state_schema for m in middleware} - state_schemas.add(AgentState) + state_schemas: set[type] = {m.state_schema for m in middleware} + # Use provided state_schema if available, otherwise use base AgentState + base_state = state_schema if state_schema is not None else AgentState + state_schemas.add(base_state) - state_schema = _resolve_schema(state_schemas, "StateSchema", None) + resolved_state_schema = _resolve_schema(state_schemas, "StateSchema", None) input_schema = _resolve_schema(state_schemas, "InputSchema", "input") output_schema = _resolve_schema(state_schemas, "OutputSchema", "output") # create graph, add nodes graph: StateGraph[ - AgentState[ResponseT], ContextT, PublicAgentState[ResponseT], PublicAgentState[ResponseT] + AgentState[ResponseT], ContextT, _InputAgentState, _OutputAgentState[ResponseT] ] = StateGraph( - state_schema=state_schema, + state_schema=resolved_state_schema, input_schema=input_schema, output_schema=output_schema, context_schema=context_schema, @@ -663,8 +826,16 @@ def create_agent( # noqa: PLR0915 provider_strategy_binding = ProviderStrategyBinding.from_schema_spec( effective_response_format.schema_spec ) - structured_response = provider_strategy_binding.parse(output) - return {"messages": [output], "structured_response": structured_response} + try: + structured_response = provider_strategy_binding.parse(output) + except Exception as exc: # noqa: BLE001 + schema_name = getattr( + effective_response_format.schema_spec.schema, "__name__", "response_format" + ) + validation_error = StructuredOutputValidationError(schema_name, exc, output) + raise validation_error + else: + return {"messages": [output], "structured_response": structured_response} return {"messages": [output]} # Handle structured output with tool strategy @@ -678,11 +849,11 @@ def create_agent( # noqa: PLR0915 ] if structured_tool_calls: - exception: Exception | None = None + exception: StructuredOutputError | None = None if len(structured_tool_calls) > 1: # Handle multiple structured outputs error tool_names = [tc["name"] for tc in structured_tool_calls] - exception = MultipleStructuredOutputsError(tool_names) + exception = MultipleStructuredOutputsError(tool_names, output) should_retry, error_message = 
_handle_structured_output_error( exception, effective_response_format ) @@ -724,7 +895,7 @@ def create_agent( # noqa: PLR0915 "structured_response": structured_response, } except Exception as exc: # noqa: BLE001 - exception = StructuredOutputValidationError(tool_call["name"], exc) + exception = StructuredOutputValidationError(tool_call["name"], exc, output) should_retry, error_message = _handle_structured_output_error( exception, effective_response_format ) @@ -753,8 +924,9 @@ def create_agent( # noqa: PLR0915 request: The model request containing model, tools, and response format. Returns: - Tuple of (bound_model, effective_response_format) where `effective_response_format` - is the actual strategy used (may differ from initial if auto-detected). + Tuple of `(bound_model, effective_response_format)` where + `effective_response_format` is the actual strategy used (may differ from + initial if auto-detected). """ # Validate ONLY client-side tools that need to exist in tool_node # Build map of available client-side tools from the ToolNode @@ -857,31 +1029,31 @@ def create_agent( # noqa: PLR0915 ) return request.model.bind(**request.model_settings), None - def _execute_model_sync(request: ModelRequest) -> _InternalModelResponse: - """Execute model and return result or exception. + def _execute_model_sync(request: ModelRequest) -> ModelResponse: + """Execute model and return response. - This is the core model execution logic wrapped by wrap_model_call handlers. + This is the core model execution logic wrapped by `wrap_model_call` handlers. + Raises any exceptions that occur during model invocation. """ - try: - # Get the bound model (with auto-detection if needed) - model_, effective_response_format = _get_bound_model(request) - messages = request.messages - if request.system_prompt: - messages = [SystemMessage(request.system_prompt), *messages] + # Get the bound model (with auto-detection if needed) + model_, effective_response_format = _get_bound_model(request) + messages = request.messages + if request.system_prompt and not isinstance(request.system_prompt, SystemMessage): + messages = [SystemMessage(content=request.system_prompt), *messages] + elif request.system_prompt and isinstance(request.system_prompt, SystemMessage): + messages = [request.system_prompt, *messages] - output = model_.invoke(messages) - return _InternalModelResponse( - result=output, - exception=None, - effective_response_format=effective_response_format, - ) - except Exception as error: # noqa: BLE001 - # Catch all exceptions from model invocation - return _InternalModelResponse( - result=None, - exception=error, - effective_response_format=None, - ) + output = model_.invoke(messages) + + # Handle model output to get messages and structured_response + handled_output = _handle_model_output(output, effective_response_format) + messages_list = handled_output["messages"] + structured_response = handled_output.get("structured_response") + + return ModelResponse( + result=messages_list, + structured_response=structured_response, + ) def model_node(state: AgentState, runtime: Runtime[ContextT]) -> dict[str, Any]: """Sync model request handler with sequential middleware processing.""" @@ -896,58 +1068,47 @@ def create_agent( # noqa: PLR0915 runtime=runtime, ) - # Execute with or without handler - effective_response_format: Any = None - - # Define base handler that executes the model - def base_handler(req: ModelRequest) -> AIMessage: - nonlocal effective_response_format - internal_response = _execute_model_sync(req) - if 
internal_response.exception is not None: - raise internal_response.exception - if internal_response.result is None: - msg = "Model execution succeeded but returned no result" - raise RuntimeError(msg) - effective_response_format = internal_response.effective_response_format - return internal_response.result - if wrap_model_call_handler is None: # No handlers - execute directly - output = base_handler(request) + response = _execute_model_sync(request) else: # Call composed handler with base handler - output = wrap_model_call_handler(request, base_handler) - return { - "thread_model_call_count": state.get("thread_model_call_count", 0) + 1, - "run_model_call_count": state.get("run_model_call_count", 0) + 1, - **_handle_model_output(output, effective_response_format), - } + response = wrap_model_call_handler(request, _execute_model_sync) - async def _execute_model_async(request: ModelRequest) -> _InternalModelResponse: - """Execute model asynchronously and return result or exception. + # Extract state updates from ModelResponse + state_updates = {"messages": response.result} + if response.structured_response is not None: + state_updates["structured_response"] = response.structured_response - This is the core async model execution logic wrapped by wrap_model_call handlers. + return state_updates + + async def _execute_model_async(request: ModelRequest) -> ModelResponse: + """Execute model asynchronously and return response. + + This is the core async model execution logic wrapped by `wrap_model_call` + handlers. + + Raises any exceptions that occur during model invocation. """ - try: - # Get the bound model (with auto-detection if needed) - model_, effective_response_format = _get_bound_model(request) - messages = request.messages - if request.system_prompt: - messages = [SystemMessage(request.system_prompt), *messages] + # Get the bound model (with auto-detection if needed) + model_, effective_response_format = _get_bound_model(request) + messages = request.messages + if request.system_prompt and not isinstance(request.system_prompt, SystemMessage): + messages = [SystemMessage(content=request.system_prompt), *messages] + elif request.system_prompt and isinstance(request.system_prompt, SystemMessage): + messages = [request.system_prompt, *messages] - output = await model_.ainvoke(messages) - return _InternalModelResponse( - result=output, - exception=None, - effective_response_format=effective_response_format, - ) - except Exception as error: # noqa: BLE001 - # Catch all exceptions from model invocation - return _InternalModelResponse( - result=None, - exception=error, - effective_response_format=None, - ) + output = await model_.ainvoke(messages) + + # Handle model output to get messages and structured_response + handled_output = _handle_model_output(output, effective_response_format) + messages_list = handled_output["messages"] + structured_response = handled_output.get("structured_response") + + return ModelResponse( + result=messages_list, + structured_response=structured_response, + ) async def amodel_node(state: AgentState, runtime: Runtime[ContextT]) -> dict[str, Any]: """Async model request handler with sequential middleware processing.""" @@ -962,32 +1123,19 @@ def create_agent( # noqa: PLR0915 runtime=runtime, ) - # Execute with or without handler - effective_response_format: Any = None - - # Define base async handler that executes the model - async def base_handler(req: ModelRequest) -> AIMessage: - nonlocal effective_response_format - internal_response = await 
_execute_model_async(req) - if internal_response.exception is not None: - raise internal_response.exception - if internal_response.result is None: - msg = "Model execution succeeded but returned no result" - raise RuntimeError(msg) - effective_response_format = internal_response.effective_response_format - return internal_response.result - if awrap_model_call_handler is None: # No async handlers - execute directly - output = await base_handler(request) + response = await _execute_model_async(request) else: # Call composed async handler with base handler - output = await awrap_model_call_handler(request, base_handler) - return { - "thread_model_call_count": state.get("thread_model_call_count", 0) + 1, - "run_model_call_count": state.get("run_model_call_count", 0) + 1, - **_handle_model_output(output, effective_response_format), - } + response = await awrap_model_call_handler(request, _execute_model_async) + + # Extract state updates from ModelResponse + state_updates = {"messages": response.result} + if response.structured_response is not None: + state_updates["structured_response"] = response.structured_response + + return state_updates # Use sync or async based on model capabilities graph.add_node("model", RunnableCallable(model_node, amodel_node, trace=False)) @@ -1015,7 +1163,9 @@ def create_agent( # noqa: PLR0915 else None ) before_agent_node = RunnableCallable(sync_before_agent, async_before_agent, trace=False) - graph.add_node(f"{m.name}.before_agent", before_agent_node, input_schema=state_schema) + graph.add_node( + f"{m.name}.before_agent", before_agent_node, input_schema=resolved_state_schema + ) if ( m.__class__.before_model is not AgentMiddleware.before_model @@ -1034,7 +1184,9 @@ def create_agent( # noqa: PLR0915 else None ) before_node = RunnableCallable(sync_before, async_before, trace=False) - graph.add_node(f"{m.name}.before_model", before_node, input_schema=state_schema) + graph.add_node( + f"{m.name}.before_model", before_node, input_schema=resolved_state_schema + ) if ( m.__class__.after_model is not AgentMiddleware.after_model @@ -1053,7 +1205,7 @@ def create_agent( # noqa: PLR0915 else None ) after_node = RunnableCallable(sync_after, async_after, trace=False) - graph.add_node(f"{m.name}.after_model", after_node, input_schema=state_schema) + graph.add_node(f"{m.name}.after_model", after_node, input_schema=resolved_state_schema) if ( m.__class__.after_agent is not AgentMiddleware.after_agent @@ -1072,7 +1224,9 @@ def create_agent( # noqa: PLR0915 else None ) after_agent_node = RunnableCallable(sync_after_agent, async_after_agent, trace=False) - graph.add_node(f"{m.name}.after_agent", after_agent_node, input_schema=state_schema) + graph.add_node( + f"{m.name}.after_agent", after_agent_node, input_schema=resolved_state_schema + ) # Determine the entry node (runs once at start): before_agent -> before_model -> model if middleware_w_before_agent: @@ -1105,15 +1259,27 @@ def create_agent( # noqa: PLR0915 graph.add_edge(START, entry_node) # add conditional edges only if tools exist if tool_node is not None: + # Only include exit_node in destinations if any tool has return_direct=True + # or if there are structured output tools + tools_to_model_destinations = [loop_entry_node] + if ( + any(tool.return_direct for tool in tool_node.tools_by_name.values()) + or structured_output_tools + ): + tools_to_model_destinations.append(exit_node) + graph.add_conditional_edges( "tools", - _make_tools_to_model_edge( - tool_node=tool_node, - model_destination=loop_entry_node, - 
structured_output_tools=structured_output_tools, - end_destination=exit_node, + RunnableCallable( + _make_tools_to_model_edge( + tool_node=tool_node, + model_destination=loop_entry_node, + structured_output_tools=structured_output_tools, + end_destination=exit_node, + ), + trace=False, ), - [loop_entry_node, exit_node], + tools_to_model_destinations, ) # base destinations are tools and exit_node @@ -1128,19 +1294,25 @@ def create_agent( # noqa: PLR0915 graph.add_conditional_edges( loop_exit_node, - _make_model_to_tools_edge( - model_destination=loop_entry_node, - structured_output_tools=structured_output_tools, - end_destination=exit_node, + RunnableCallable( + _make_model_to_tools_edge( + model_destination=loop_entry_node, + structured_output_tools=structured_output_tools, + end_destination=exit_node, + ), + trace=False, ), model_to_tools_destinations, ) elif len(structured_output_tools) > 0: graph.add_conditional_edges( loop_exit_node, - _make_model_to_model_edge( - model_destination=loop_entry_node, - end_destination=exit_node, + RunnableCallable( + _make_model_to_model_edge( + model_destination=loop_entry_node, + end_destination=exit_node, + ), + trace=False, ), [loop_entry_node, exit_node], ) @@ -1378,10 +1550,12 @@ def _make_tools_to_model_edge( last_ai_message, tool_messages = _fetch_last_ai_and_tool_messages(state["messages"]) # 1. Exit condition: All executed tools have return_direct=True - if all( - tool_node.tools_by_name[c["name"]].return_direct - for c in last_ai_message.tool_calls - if c["name"] in tool_node.tools_by_name + # Filter to only client-side tools (provider tools are not in tool_node) + client_side_tool_calls = [ + c for c in last_ai_message.tool_calls if c["name"] in tool_node.tools_by_name + ] + if client_side_tool_calls and all( + tool_node.tools_by_name[c["name"]].return_direct for c in client_side_tool_calls ): return end_destination @@ -1398,7 +1572,9 @@ def _make_tools_to_model_edge( def _add_middleware_edge( - graph: StateGraph[AgentState, ContextT, PublicAgentState, PublicAgentState], + graph: StateGraph[ + AgentState[ResponseT], ContextT, _InputAgentState, _OutputAgentState[ResponseT] + ], *, name: str, default_destination: str, @@ -1437,7 +1613,7 @@ def _add_middleware_edge( if "model" in can_jump_to and name != model_destination: destinations.append(model_destination) - graph.add_conditional_edges(name, jump_edge, destinations) + graph.add_conditional_edges(name, RunnableCallable(jump_edge, trace=False), destinations) else: graph.add_edge(name, default_destination) diff --git a/libs/langchain_v1/langchain/agents/middleware/__init__.py b/libs/langchain_v1/langchain/agents/middleware/__init__.py index d7066c67920..8ed35aafcd5 100644 --- a/libs/langchain_v1/langchain/agents/middleware/__init__.py +++ b/libs/langchain_v1/langchain/agents/middleware/__init__.py @@ -1,22 +1,40 @@ -"""Middleware plugins for agents.""" +"""Entrypoint to using [Middleware](https://docs.langchain.com/oss/python/langchain/middleware) plugins with [Agents](https://docs.langchain.com/oss/python/langchain/agents). + +!!! warning "Reference docs" + This page contains **reference documentation** for Middleware. See + [the docs](https://docs.langchain.com/oss/python/langchain/middleware) for conceptual + guides, tutorials, and examples on using Middleware. 
+""" # noqa: E501 from .context_editing import ( ClearToolUsesEdit, ContextEditingMiddleware, ) -from .human_in_the_loop import HumanInTheLoopMiddleware +from .human_in_the_loop import ( + HumanInTheLoopMiddleware, + InterruptOnConfig, +) from .model_call_limit import ModelCallLimitMiddleware from .model_fallback import ModelFallbackMiddleware from .pii import PIIDetectionError, PIIMiddleware -from .planning import PlanningMiddleware -from .prompt_caching import AnthropicPromptCachingMiddleware +from .shell_tool import ( + CodexSandboxExecutionPolicy, + DockerExecutionPolicy, + HostExecutionPolicy, + RedactionRule, + ShellToolMiddleware, +) from .summarization import SummarizationMiddleware +from .todo import TodoListMiddleware from .tool_call_limit import ToolCallLimitMiddleware +from .tool_emulator import LLMToolEmulator +from .tool_retry import ToolRetryMiddleware from .tool_selection import LLMToolSelectorMiddleware from .types import ( AgentMiddleware, AgentState, ModelRequest, + ModelResponse, after_agent, after_model, before_agent, @@ -24,25 +42,33 @@ from .types import ( dynamic_prompt, hook_config, wrap_model_call, + wrap_tool_call, ) __all__ = [ "AgentMiddleware", "AgentState", - # should move to langchain-anthropic if we decide to keep it - "AnthropicPromptCachingMiddleware", "ClearToolUsesEdit", + "CodexSandboxExecutionPolicy", "ContextEditingMiddleware", + "DockerExecutionPolicy", + "HostExecutionPolicy", "HumanInTheLoopMiddleware", + "InterruptOnConfig", + "LLMToolEmulator", "LLMToolSelectorMiddleware", "ModelCallLimitMiddleware", "ModelFallbackMiddleware", "ModelRequest", + "ModelResponse", "PIIDetectionError", "PIIMiddleware", - "PlanningMiddleware", + "RedactionRule", + "ShellToolMiddleware", "SummarizationMiddleware", + "TodoListMiddleware", "ToolCallLimitMiddleware", + "ToolRetryMiddleware", "after_agent", "after_model", "before_agent", @@ -50,4 +76,5 @@ __all__ = [ "dynamic_prompt", "hook_config", "wrap_model_call", + "wrap_tool_call", ] diff --git a/libs/langchain_v1/langchain/agents/middleware/_execution.py b/libs/langchain_v1/langchain/agents/middleware/_execution.py new file mode 100644 index 00000000000..f14235bf627 --- /dev/null +++ b/libs/langchain_v1/langchain/agents/middleware/_execution.py @@ -0,0 +1,388 @@ +"""Execution policies for the persistent shell middleware.""" + +from __future__ import annotations + +import abc +import json +import os +import shutil +import subprocess +import sys +import typing +from collections.abc import Mapping, Sequence +from dataclasses import dataclass, field +from pathlib import Path + +try: # pragma: no cover - optional dependency on POSIX platforms + import resource +except ImportError: # pragma: no cover - non-POSIX systems + resource = None # type: ignore[assignment] + + +SHELL_TEMP_PREFIX = "langchain-shell-" + + +def _launch_subprocess( + command: Sequence[str], + *, + env: Mapping[str, str], + cwd: Path, + preexec_fn: typing.Callable[[], None] | None, + start_new_session: bool, +) -> subprocess.Popen[str]: + return subprocess.Popen( # noqa: S603 + list(command), + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=cwd, + text=True, + encoding="utf-8", + errors="replace", + bufsize=1, + env=env, + preexec_fn=preexec_fn, # noqa: PLW1509 + start_new_session=start_new_session, + ) + + +if typing.TYPE_CHECKING: + from collections.abc import Mapping, Sequence + from pathlib import Path + + +@dataclass +class BaseExecutionPolicy(abc.ABC): + """Configuration contract for persistent shell sessions. 
+ + Concrete subclasses encapsulate how a shell process is launched and constrained. + Each policy documents its security guarantees and the operating environments in + which it is appropriate. Use :class:`HostExecutionPolicy` for trusted, same-host + execution; :class:`CodexSandboxExecutionPolicy` when the Codex CLI sandbox is + available and you want additional syscall restrictions; and + :class:`DockerExecutionPolicy` for container-level isolation using Docker. + """ + + command_timeout: float = 30.0 + startup_timeout: float = 30.0 + termination_timeout: float = 10.0 + max_output_lines: int = 100 + max_output_bytes: int | None = None + + def __post_init__(self) -> None: + if self.max_output_lines <= 0: + msg = "max_output_lines must be positive." + raise ValueError(msg) + + @abc.abstractmethod + def spawn( + self, + *, + workspace: Path, + env: Mapping[str, str], + command: Sequence[str], + ) -> subprocess.Popen[str]: + """Launch the persistent shell process.""" + + +@dataclass +class HostExecutionPolicy(BaseExecutionPolicy): + """Run the shell directly on the host process. + + This policy is best suited for trusted or single-tenant environments (CI jobs, + developer workstations, pre-sandboxed containers) where the agent must access the + host filesystem and tooling without additional isolation. It enforces optional CPU + and memory limits to prevent runaway commands but offers **no** filesystem or network + sandboxing; commands can modify anything the process user can reach. + + On Linux platforms resource limits are applied with ``resource.prlimit`` after the + shell starts. On macOS, where ``prlimit`` is unavailable, limits are set in a + ``preexec_fn`` before ``exec``. In both cases the shell runs in its own process group + so timeouts can terminate the full subtree. + """ + + cpu_time_seconds: int | None = None + memory_bytes: int | None = None + create_process_group: bool = True + + _limits_requested: bool = field(init=False, repr=False, default=False) + + def __post_init__(self) -> None: + super().__post_init__() + if self.cpu_time_seconds is not None and self.cpu_time_seconds <= 0: + msg = "cpu_time_seconds must be positive if provided." + raise ValueError(msg) + if self.memory_bytes is not None and self.memory_bytes <= 0: + msg = "memory_bytes must be positive if provided." + raise ValueError(msg) + self._limits_requested = any( + value is not None for value in (self.cpu_time_seconds, self.memory_bytes) + ) + if self._limits_requested and resource is None: + msg = ( + "HostExecutionPolicy cpu/memory limits require the Python 'resource' module. " + "Either remove the limits or run on a POSIX platform." 
+ ) + raise RuntimeError(msg) + + def spawn( + self, + *, + workspace: Path, + env: Mapping[str, str], + command: Sequence[str], + ) -> subprocess.Popen[str]: + process = _launch_subprocess( + list(command), + env=env, + cwd=workspace, + preexec_fn=self._create_preexec_fn(), + start_new_session=self.create_process_group, + ) + self._apply_post_spawn_limits(process) + return process + + def _create_preexec_fn(self) -> typing.Callable[[], None] | None: + if not self._limits_requested or self._can_use_prlimit(): + return None + + def _configure() -> None: # pragma: no cover - depends on OS + if self.cpu_time_seconds is not None: + limit = (self.cpu_time_seconds, self.cpu_time_seconds) + resource.setrlimit(resource.RLIMIT_CPU, limit) + if self.memory_bytes is not None: + limit = (self.memory_bytes, self.memory_bytes) + if hasattr(resource, "RLIMIT_AS"): + resource.setrlimit(resource.RLIMIT_AS, limit) + elif hasattr(resource, "RLIMIT_DATA"): + resource.setrlimit(resource.RLIMIT_DATA, limit) + + return _configure + + def _apply_post_spawn_limits(self, process: subprocess.Popen[str]) -> None: + if not self._limits_requested or not self._can_use_prlimit(): + return + if resource is None: # pragma: no cover - defensive + return + pid = process.pid + if pid is None: + return + try: + prlimit = typing.cast("typing.Any", resource).prlimit + if self.cpu_time_seconds is not None: + prlimit(pid, resource.RLIMIT_CPU, (self.cpu_time_seconds, self.cpu_time_seconds)) + if self.memory_bytes is not None: + limit = (self.memory_bytes, self.memory_bytes) + if hasattr(resource, "RLIMIT_AS"): + prlimit(pid, resource.RLIMIT_AS, limit) + elif hasattr(resource, "RLIMIT_DATA"): + prlimit(pid, resource.RLIMIT_DATA, limit) + except OSError as exc: # pragma: no cover - depends on platform support + msg = "Failed to apply resource limits via prlimit." + raise RuntimeError(msg) from exc + + @staticmethod + def _can_use_prlimit() -> bool: + return ( + resource is not None + and hasattr(resource, "prlimit") + and sys.platform.startswith("linux") + ) + + +@dataclass +class CodexSandboxExecutionPolicy(BaseExecutionPolicy): + """Launch the shell through the Codex CLI sandbox. + + Ideal when you have the Codex CLI installed and want the additional syscall and + filesystem restrictions provided by Anthropic's Seatbelt (macOS) or Landlock/seccomp + (Linux) profiles. Commands still run on the host, but within the sandbox requested by + the CLI. If the Codex binary is unavailable or the runtime lacks the required + kernel features (e.g., Landlock inside some containers), process startup fails with a + :class:`RuntimeError`. + + Configure sandbox behaviour via ``config_overrides`` to align with your Codex CLI + profile. This policy does not add its own resource limits; combine it with + host-level guards (cgroups, container resource limits) as needed. 
+ """ + + binary: str = "codex" + platform: typing.Literal["auto", "macos", "linux"] = "auto" + config_overrides: Mapping[str, typing.Any] = field(default_factory=dict) + + def spawn( + self, + *, + workspace: Path, + env: Mapping[str, str], + command: Sequence[str], + ) -> subprocess.Popen[str]: + full_command = self._build_command(command) + return _launch_subprocess( + full_command, + env=env, + cwd=workspace, + preexec_fn=None, + start_new_session=False, + ) + + def _build_command(self, command: Sequence[str]) -> list[str]: + binary = self._resolve_binary() + platform_arg = self._determine_platform() + full_command: list[str] = [binary, "sandbox", platform_arg] + for key, value in sorted(dict(self.config_overrides).items()): + full_command.extend(["-c", f"{key}={self._format_override(value)}"]) + full_command.append("--") + full_command.extend(command) + return full_command + + def _resolve_binary(self) -> str: + path = shutil.which(self.binary) + if path is None: + msg = ( + "Codex sandbox policy requires the '%s' CLI to be installed and available on PATH." + ) + raise RuntimeError(msg % self.binary) + return path + + def _determine_platform(self) -> str: + if self.platform != "auto": + return self.platform + if sys.platform.startswith("linux"): + return "linux" + if sys.platform == "darwin": + return "macos" + msg = ( + "Codex sandbox policy could not determine a supported platform; " + "set 'platform' explicitly." + ) + raise RuntimeError(msg) + + @staticmethod + def _format_override(value: typing.Any) -> str: + try: + return json.dumps(value) + except TypeError: + return str(value) + + +@dataclass +class DockerExecutionPolicy(BaseExecutionPolicy): + """Run the shell inside a dedicated Docker container. + + Choose this policy when commands originate from untrusted users or you require + strong isolation between sessions. By default the workspace is bind-mounted only when + it refers to an existing non-temporary directory; ephemeral sessions run without a + mount to minimise host exposure. The container's network namespace is disabled by + default (``--network none``) and you can enable further hardening via + ``read_only_rootfs`` and ``user``. + + The security guarantees depend on your Docker daemon configuration. Run the agent on + a host where Docker is locked down (rootless mode, AppArmor/SELinux, etc.) and review + any additional volumes or capabilities passed through ``extra_run_args``. The default + image is ``python:3.12-alpine3.19``; supply a custom image if you need preinstalled + tooling. + """ + + binary: str = "docker" + image: str = "python:3.12-alpine3.19" + remove_container_on_exit: bool = True + network_enabled: bool = False + extra_run_args: Sequence[str] | None = None + memory_bytes: int | None = None + cpu_time_seconds: typing.Any | None = None + cpus: str | None = None + read_only_rootfs: bool = False + user: str | None = None + + def __post_init__(self) -> None: + super().__post_init__() + if self.memory_bytes is not None and self.memory_bytes <= 0: + msg = "memory_bytes must be positive if provided." + raise ValueError(msg) + if self.cpu_time_seconds is not None: + msg = ( + "DockerExecutionPolicy does not support cpu_time_seconds; configure CPU limits " + "using Docker run options such as '--cpus'." + ) + raise RuntimeError(msg) + if self.cpus is not None and not self.cpus.strip(): + msg = "cpus must be a non-empty string when provided." 
+ raise ValueError(msg) + if self.user is not None and not self.user.strip(): + msg = "user must be a non-empty string when provided." + raise ValueError(msg) + self.extra_run_args = tuple(self.extra_run_args or ()) + + def spawn( + self, + *, + workspace: Path, + env: Mapping[str, str], + command: Sequence[str], + ) -> subprocess.Popen[str]: + full_command = self._build_command(workspace, env, command) + host_env = os.environ.copy() + return _launch_subprocess( + full_command, + env=host_env, + cwd=workspace, + preexec_fn=None, + start_new_session=False, + ) + + def _build_command( + self, + workspace: Path, + env: Mapping[str, str], + command: Sequence[str], + ) -> list[str]: + binary = self._resolve_binary() + full_command: list[str] = [binary, "run", "-i"] + if self.remove_container_on_exit: + full_command.append("--rm") + if not self.network_enabled: + full_command.extend(["--network", "none"]) + if self.memory_bytes is not None: + full_command.extend(["--memory", str(self.memory_bytes)]) + if self._should_mount_workspace(workspace): + host_path = str(workspace) + full_command.extend(["-v", f"{host_path}:{host_path}"]) + full_command.extend(["-w", host_path]) + else: + full_command.extend(["-w", "/"]) + if self.read_only_rootfs: + full_command.append("--read-only") + for key, value in env.items(): + full_command.extend(["-e", f"{key}={value}"]) + if self.cpus is not None: + full_command.extend(["--cpus", self.cpus]) + if self.user is not None: + full_command.extend(["--user", self.user]) + if self.extra_run_args: + full_command.extend(self.extra_run_args) + full_command.append(self.image) + full_command.extend(command) + return full_command + + @staticmethod + def _should_mount_workspace(workspace: Path) -> bool: + return not workspace.name.startswith(SHELL_TEMP_PREFIX) + + def _resolve_binary(self) -> str: + path = shutil.which(self.binary) + if path is None: + msg = ( + "Docker execution policy requires the '%s' CLI to be installed" + " and available on PATH." + ) + raise RuntimeError(msg % self.binary) + return path + + +__all__ = [ + "BaseExecutionPolicy", + "CodexSandboxExecutionPolicy", + "DockerExecutionPolicy", + "HostExecutionPolicy", +] diff --git a/libs/langchain_v1/langchain/agents/middleware/_redaction.py b/libs/langchain_v1/langchain/agents/middleware/_redaction.py new file mode 100644 index 00000000000..ba4755b8ce8 --- /dev/null +++ b/libs/langchain_v1/langchain/agents/middleware/_redaction.py @@ -0,0 +1,350 @@ +"""Shared redaction utilities for middleware components.""" + +from __future__ import annotations + +import hashlib +import ipaddress +import re +from collections.abc import Callable, Sequence +from dataclasses import dataclass +from typing import Literal +from urllib.parse import urlparse + +from typing_extensions import TypedDict + +RedactionStrategy = Literal["block", "redact", "mask", "hash"] +"""Supported strategies for handling detected sensitive values.""" + + +class PIIMatch(TypedDict): + """Represents an individual match of sensitive data.""" + + type: str + value: str + start: int + end: int + + +class PIIDetectionError(Exception): + """Raised when configured to block on detected sensitive values.""" + + def __init__(self, pii_type: str, matches: Sequence[PIIMatch]) -> None: + """Initialize the exception with match context. + + Args: + pii_type: Name of the detected sensitive type. + matches: All matches that were detected for that type. 
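For orientation, a minimal sketch of the detector-plus-strategy pipeline this module builds up; the helpers are defined further down in this file, the module itself is internal, and the sample text is made up.

```python
from langchain.agents.middleware._redaction import apply_strategy, detect_email

text = "Contact me at jane.doe@example.com for details."
matches = detect_email(text)

print(apply_strategy(text, matches, "redact"))  # Contact me at [REDACTED_EMAIL] for details.
print(apply_strategy(text, matches, "mask"))    # Contact me at jane.doe@****.com for details.
```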
+ """ + self.pii_type = pii_type + self.matches = list(matches) + count = len(matches) + msg = f"Detected {count} instance(s) of {pii_type} in text content" + super().__init__(msg) + + +Detector = Callable[[str], list[PIIMatch]] +"""Callable signature for detectors that locate sensitive values.""" + + +def detect_email(content: str) -> list[PIIMatch]: + """Detect email addresses in content.""" + pattern = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b" + return [ + PIIMatch( + type="email", + value=match.group(), + start=match.start(), + end=match.end(), + ) + for match in re.finditer(pattern, content) + ] + + +def detect_credit_card(content: str) -> list[PIIMatch]: + """Detect credit card numbers in content using Luhn validation.""" + pattern = r"\b\d{4}[\s-]?\d{4}[\s-]?\d{4}[\s-]?\d{4}\b" + matches = [] + + for match in re.finditer(pattern, content): + card_number = match.group() + if _passes_luhn(card_number): + matches.append( + PIIMatch( + type="credit_card", + value=card_number, + start=match.start(), + end=match.end(), + ) + ) + + return matches + + +def detect_ip(content: str) -> list[PIIMatch]: + """Detect IPv4 or IPv6 addresses in content.""" + matches: list[PIIMatch] = [] + ipv4_pattern = r"\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b" + + for match in re.finditer(ipv4_pattern, content): + ip_candidate = match.group() + try: + ipaddress.ip_address(ip_candidate) + except ValueError: + continue + matches.append( + PIIMatch( + type="ip", + value=ip_candidate, + start=match.start(), + end=match.end(), + ) + ) + + return matches + + +def detect_mac_address(content: str) -> list[PIIMatch]: + """Detect MAC addresses in content.""" + pattern = r"\b([0-9A-Fa-f]{2}[:-]){5}[0-9A-Fa-f]{2}\b" + return [ + PIIMatch( + type="mac_address", + value=match.group(), + start=match.start(), + end=match.end(), + ) + for match in re.finditer(pattern, content) + ] + + +def detect_url(content: str) -> list[PIIMatch]: + """Detect URLs in content using regex and stdlib validation.""" + matches: list[PIIMatch] = [] + + # Pattern 1: URLs with scheme (http:// or https://) + scheme_pattern = r"https?://[^\s<>\"{}|\\^`\[\]]+" + + for match in re.finditer(scheme_pattern, content): + url = match.group() + result = urlparse(url) + if result.scheme in ("http", "https") and result.netloc: + matches.append( + PIIMatch( + type="url", + value=url, + start=match.start(), + end=match.end(), + ) + ) + + # Pattern 2: URLs without scheme (www.example.com or example.com/path) + # More conservative to avoid false positives + bare_pattern = ( + r"\b(?:www\.)?[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?" + r"(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?:/[^\s]*)?" + ) + + for match in re.finditer(bare_pattern, content): + start, end = match.start(), match.end() + # Skip if already matched with scheme + if any(m["start"] <= start < m["end"] or m["start"] < end <= m["end"] for m in matches): + continue + + url = match.group() + # Only accept if it has a path or starts with www + # This reduces false positives like "example.com" in prose + if "/" in url or url.startswith("www."): + # Add scheme for validation (required for urlparse to work correctly) + test_url = f"http://{url}" + result = urlparse(test_url) + if result.netloc and "." 
in result.netloc: + matches.append( + PIIMatch( + type="url", + value=url, + start=start, + end=end, + ) + ) + + return matches + + +BUILTIN_DETECTORS: dict[str, Detector] = { + "email": detect_email, + "credit_card": detect_credit_card, + "ip": detect_ip, + "mac_address": detect_mac_address, + "url": detect_url, +} +"""Registry of built-in detectors keyed by type name.""" + + +def _passes_luhn(card_number: str) -> bool: + """Validate credit card number using the Luhn checksum.""" + digits = [int(d) for d in card_number if d.isdigit()] + if not 13 <= len(digits) <= 19: + return False + + checksum = 0 + for index, digit in enumerate(reversed(digits)): + value = digit + if index % 2 == 1: + value *= 2 + if value > 9: + value -= 9 + checksum += value + return checksum % 10 == 0 + + +def _apply_redact_strategy(content: str, matches: list[PIIMatch]) -> str: + result = content + for match in sorted(matches, key=lambda item: item["start"], reverse=True): + replacement = f"[REDACTED_{match['type'].upper()}]" + result = result[: match["start"]] + replacement + result[match["end"] :] + return result + + +def _apply_mask_strategy(content: str, matches: list[PIIMatch]) -> str: + result = content + for match in sorted(matches, key=lambda item: item["start"], reverse=True): + value = match["value"] + pii_type = match["type"] + if pii_type == "email": + parts = value.split("@") + if len(parts) == 2: + domain_parts = parts[1].split(".") + masked = ( + f"{parts[0]}@****.{domain_parts[-1]}" + if len(domain_parts) >= 2 + else f"{parts[0]}@****" + ) + else: + masked = "****" + elif pii_type == "credit_card": + digits_only = "".join(c for c in value if c.isdigit()) + separator = "-" if "-" in value else " " if " " in value else "" + if separator: + masked = f"****{separator}****{separator}****{separator}{digits_only[-4:]}" + else: + masked = f"************{digits_only[-4:]}" + elif pii_type == "ip": + octets = value.split(".") + masked = f"*.*.*.{octets[-1]}" if len(octets) == 4 else "****" + elif pii_type == "mac_address": + separator = ":" if ":" in value else "-" + masked = ( + f"**{separator}**{separator}**{separator}**{separator}**{separator}{value[-2:]}" + ) + elif pii_type == "url": + masked = "[MASKED_URL]" + else: + masked = f"****{value[-4:]}" if len(value) > 4 else "****" + result = result[: match["start"]] + masked + result[match["end"] :] + return result + + +def _apply_hash_strategy(content: str, matches: list[PIIMatch]) -> str: + result = content + for match in sorted(matches, key=lambda item: item["start"], reverse=True): + digest = hashlib.sha256(match["value"].encode()).hexdigest()[:8] + replacement = f"<{match['type']}_hash:{digest}>" + result = result[: match["start"]] + replacement + result[match["end"] :] + return result + + +def apply_strategy( + content: str, + matches: list[PIIMatch], + strategy: RedactionStrategy, +) -> str: + """Apply the configured strategy to matches within content.""" + if not matches: + return content + if strategy == "redact": + return _apply_redact_strategy(content, matches) + if strategy == "mask": + return _apply_mask_strategy(content, matches) + if strategy == "hash": + return _apply_hash_strategy(content, matches) + if strategy == "block": + raise PIIDetectionError(matches[0]["type"], matches) + msg = f"Unknown redaction strategy: {strategy}" + raise ValueError(msg) + + +def resolve_detector(pii_type: str, detector: Detector | str | None) -> Detector: + """Return a callable detector for the given configuration.""" + if detector is None: + if pii_type not in 
BUILTIN_DETECTORS: + msg = ( + f"Unknown PII type: {pii_type}. " + f"Must be one of {list(BUILTIN_DETECTORS.keys())} or provide a custom detector." + ) + raise ValueError(msg) + return BUILTIN_DETECTORS[pii_type] + if isinstance(detector, str): + pattern = re.compile(detector) + + def regex_detector(content: str) -> list[PIIMatch]: + return [ + PIIMatch( + type=pii_type, + value=match.group(), + start=match.start(), + end=match.end(), + ) + for match in pattern.finditer(content) + ] + + return regex_detector + return detector + + +@dataclass(frozen=True) +class RedactionRule: + """Configuration for handling a single PII type.""" + + pii_type: str + strategy: RedactionStrategy = "redact" + detector: Detector | str | None = None + + def resolve(self) -> ResolvedRedactionRule: + """Resolve runtime detector and return an immutable rule.""" + resolved_detector = resolve_detector(self.pii_type, self.detector) + return ResolvedRedactionRule( + pii_type=self.pii_type, + strategy=self.strategy, + detector=resolved_detector, + ) + + +@dataclass(frozen=True) +class ResolvedRedactionRule: + """Resolved redaction rule ready for execution.""" + + pii_type: str + strategy: RedactionStrategy + detector: Detector + + def apply(self, content: str) -> tuple[str, list[PIIMatch]]: + """Apply this rule to content, returning new content and matches.""" + matches = self.detector(content) + if not matches: + return content, [] + updated = apply_strategy(content, matches, self.strategy) + return updated, matches + + +__all__ = [ + "PIIDetectionError", + "PIIMatch", + "RedactionRule", + "ResolvedRedactionRule", + "apply_strategy", + "detect_credit_card", + "detect_email", + "detect_ip", + "detect_mac_address", + "detect_url", +] diff --git a/libs/langchain_v1/langchain/agents/middleware/context_editing.py b/libs/langchain_v1/langchain/agents/middleware/context_editing.py index c26e28dc330..3de8a380f7e 100644 --- a/libs/langchain_v1/langchain/agents/middleware/context_editing.py +++ b/libs/langchain_v1/langchain/agents/middleware/context_editing.py @@ -8,7 +8,7 @@ with any LangChain chat model. from __future__ import annotations -from collections.abc import Callable, Iterable, Sequence +from collections.abc import Awaitable, Callable, Iterable, Sequence from dataclasses import dataclass from typing import Literal @@ -22,7 +22,12 @@ from langchain_core.messages import ( from langchain_core.messages.utils import count_tokens_approximately from typing_extensions import Protocol -from langchain.agents.middleware.types import AgentMiddleware, ModelRequest +from langchain.agents.middleware.types import ( + AgentMiddleware, + ModelCallResult, + ModelRequest, + ModelResponse, +) DEFAULT_TOOL_PLACEHOLDER = "[cleared]" @@ -177,7 +182,7 @@ class ClearToolUsesEdit(ContextEdit): class ContextEditingMiddleware(AgentMiddleware): - """Middleware that automatically prunes tool results to manage context size. + """Automatically prunes tool results to manage context size. The middleware applies a sequence of edits when the total input token count exceeds configured thresholds. Currently the `ClearToolUsesEdit` strategy is @@ -193,7 +198,7 @@ class ContextEditingMiddleware(AgentMiddleware): edits: Iterable[ContextEdit] | None = None, token_count_method: Literal["approximate", "model"] = "approximate", # noqa: S107 ) -> None: - """Initialise a context editing middleware instance. + """Initializes a context editing middleware instance. Args: edits: Sequence of edit strategies to apply. 
Defaults to a single @@ -209,8 +214,8 @@ class ContextEditingMiddleware(AgentMiddleware): def wrap_model_call( self, request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: """Apply context edits before invoking the model via handler.""" if not request.messages: return handler(request) @@ -220,9 +225,11 @@ class ContextEditingMiddleware(AgentMiddleware): def count_tokens(messages: Sequence[BaseMessage]) -> int: return count_tokens_approximately(messages) else: - system_msg = ( - [SystemMessage(content=request.system_prompt)] if request.system_prompt else [] - ) + system_msg = [] + if request.system_prompt and not isinstance(request.system_prompt, SystemMessage): + system_msg = [SystemMessage(content=request.system_prompt)] + elif request.system_prompt and isinstance(request.system_prompt, SystemMessage): + system_msg = [request.system_prompt] def count_tokens(messages: Sequence[BaseMessage]) -> int: return request.model.get_num_tokens_from_messages( @@ -234,6 +241,37 @@ class ContextEditingMiddleware(AgentMiddleware): return handler(request) + async def awrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: + """Apply context edits before invoking the model via handler (async version).""" + if not request.messages: + return await handler(request) + + if self.token_count_method == "approximate": # noqa: S105 + + def count_tokens(messages: Sequence[BaseMessage]) -> int: + return count_tokens_approximately(messages) + else: + system_msg = [] + + if request.system_prompt and not isinstance(request.system_prompt, SystemMessage): + system_msg = [SystemMessage(content=request.system_prompt)] + elif request.system_prompt and isinstance(request.system_prompt, SystemMessage): + system_msg = [request.system_prompt] + + def count_tokens(messages: Sequence[BaseMessage]) -> int: + return request.model.get_num_tokens_from_messages( + system_msg + list(messages), request.tools + ) + + for edit in self.edits: + edit.apply(request.messages, count_tokens=count_tokens) + + return await handler(request) + __all__ = [ "ClearToolUsesEdit", diff --git a/libs/langchain_v1/langchain/agents/middleware/file_search.py b/libs/langchain_v1/langchain/agents/middleware/file_search.py new file mode 100644 index 00000000000..fe9efc60b02 --- /dev/null +++ b/libs/langchain_v1/langchain/agents/middleware/file_search.py @@ -0,0 +1,382 @@ +"""File search middleware for Anthropic text editor and memory tools. + +This module provides Glob and Grep search tools that operate on files stored +in state or filesystem. 
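The Grep tool's `include` filter accepts brace patterns such as `*.{ts,tsx}`; the self-contained sketch below re-implements that expansion to show the idea, rather than importing the private helpers defined later in this file.

```python
import fnmatch

def expand(pattern: str) -> list[str]:
    """Expand brace alternatives, e.g. '*.{ts,tsx}' -> ['*.ts', '*.tsx']."""
    start = pattern.find("{")
    if start == -1:
        return [pattern]
    end = pattern.find("}", start)
    if end == -1:
        raise ValueError(f"Unbalanced brace in pattern: {pattern!r}")
    prefix, inner, suffix = pattern[:start], pattern[start + 1 : end], pattern[end + 1 :]
    expanded: list[str] = []
    for option in inner.split(","):
        expanded.extend(expand(prefix + option + suffix))
    return expanded

print(expand("*.{ts,tsx}"))  # ['*.ts', '*.tsx']
print(any(fnmatch.fnmatch("app.tsx", p) for p in expand("*.{ts,tsx}")))  # True
```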
+""" + +from __future__ import annotations + +import fnmatch +import json +import re +import subprocess +from contextlib import suppress +from datetime import datetime, timezone +from pathlib import Path +from typing import Literal + +from langchain_core.tools import tool + +from langchain.agents.middleware.types import AgentMiddleware + + +def _expand_include_patterns(pattern: str) -> list[str] | None: + """Expand brace patterns like ``*.{py,pyi}`` into a list of globs.""" + if "}" in pattern and "{" not in pattern: + return None + + expanded: list[str] = [] + + def _expand(current: str) -> None: + start = current.find("{") + if start == -1: + expanded.append(current) + return + + end = current.find("}", start) + if end == -1: + raise ValueError + + prefix = current[:start] + suffix = current[end + 1 :] + inner = current[start + 1 : end] + if not inner: + raise ValueError + + for option in inner.split(","): + _expand(prefix + option + suffix) + + try: + _expand(pattern) + except ValueError: + return None + + return expanded + + +def _is_valid_include_pattern(pattern: str) -> bool: + """Validate glob pattern used for include filters.""" + if not pattern: + return False + + if any(char in pattern for char in ("\x00", "\n", "\r")): + return False + + expanded = _expand_include_patterns(pattern) + if expanded is None: + return False + + try: + for candidate in expanded: + re.compile(fnmatch.translate(candidate)) + except re.error: + return False + + return True + + +def _match_include_pattern(basename: str, pattern: str) -> bool: + """Return True if the basename matches the include pattern.""" + expanded = _expand_include_patterns(pattern) + if not expanded: + return False + + return any(fnmatch.fnmatch(basename, candidate) for candidate in expanded) + + +class FilesystemFileSearchMiddleware(AgentMiddleware): + """Provides Glob and Grep search over filesystem files. + + This middleware adds two tools that search through local filesystem: + - Glob: Fast file pattern matching by file path + - Grep: Fast content search using ripgrep or Python fallback + + Example: + ```python + from langchain.agents import create_agent + from langchain.agents.middleware import ( + FilesystemFileSearchMiddleware, + ) + + agent = create_agent( + model=model, + tools=[], + middleware=[ + FilesystemFileSearchMiddleware(root_path="/workspace"), + ], + ) + ``` + """ + + def __init__( + self, + *, + root_path: str, + use_ripgrep: bool = True, + max_file_size_mb: int = 10, + ) -> None: + """Initialize the search middleware. + + Args: + root_path: Root directory to search. + use_ripgrep: Whether to use ripgrep for search (default: True). + Falls back to Python if ripgrep unavailable. + max_file_size_mb: Maximum file size to search in MB (default: 10). + """ + self.root_path = Path(root_path).resolve() + self.use_ripgrep = use_ripgrep + self.max_file_size_bytes = max_file_size_mb * 1024 * 1024 + + # Create tool instances as closures that capture self + @tool + def glob_search(pattern: str, path: str = "/") -> str: + """Fast file pattern matching tool that works with any codebase size. + + Supports glob patterns like **/*.js or src/**/*.ts. + Returns matching file paths sorted by modification time. + Use this tool when you need to find files by name patterns. + + Args: + pattern: The glob pattern to match files against. + path: The directory to search in. If not specified, searches from root. + + Returns: + Newline-separated list of matching file paths, sorted by modification + time (most recently modified first). 
Returns "No files found" if no + matches. + """ + try: + base_full = self._validate_and_resolve_path(path) + except ValueError: + return "No files found" + + if not base_full.exists() or not base_full.is_dir(): + return "No files found" + + # Use pathlib glob + matching: list[tuple[str, str]] = [] + for match in base_full.glob(pattern): + if match.is_file(): + # Convert to virtual path + virtual_path = "/" + str(match.relative_to(self.root_path)) + stat = match.stat() + modified_at = datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat() + matching.append((virtual_path, modified_at)) + + if not matching: + return "No files found" + + file_paths = [p for p, _ in matching] + return "\n".join(file_paths) + + @tool + def grep_search( + pattern: str, + path: str = "/", + include: str | None = None, + output_mode: Literal["files_with_matches", "content", "count"] = "files_with_matches", + ) -> str: + """Fast content search tool that works with any codebase size. + + Searches file contents using regular expressions. Supports full regex + syntax and filters files by pattern with the include parameter. + + Args: + pattern: The regular expression pattern to search for in file contents. + path: The directory to search in. If not specified, searches from root. + include: File pattern to filter (e.g., "*.js", "*.{ts,tsx}"). + output_mode: Output format: + - "files_with_matches": Only file paths containing matches (default) + - "content": Matching lines with file:line:content format + - "count": Count of matches per file + + Returns: + Search results formatted according to output_mode. Returns "No matches + found" if no results. + """ + # Compile regex pattern (for validation) + try: + re.compile(pattern) + except re.error as e: + return f"Invalid regex pattern: {e}" + + if include and not _is_valid_include_pattern(include): + return "Invalid include pattern" + + # Try ripgrep first if enabled + results = None + if self.use_ripgrep: + with suppress( + FileNotFoundError, + subprocess.CalledProcessError, + subprocess.TimeoutExpired, + ): + results = self._ripgrep_search(pattern, path, include) + + # Python fallback if ripgrep failed or is disabled + if results is None: + results = self._python_search(pattern, path, include) + + if not results: + return "No matches found" + + # Format output based on mode + return self._format_grep_results(results, output_mode) + + self.glob_search = glob_search + self.grep_search = grep_search + self.tools = [glob_search, grep_search] + + def _validate_and_resolve_path(self, path: str) -> Path: + """Validate and resolve a virtual path to filesystem path.""" + # Normalize path + if not path.startswith("/"): + path = "/" + path + + # Check for path traversal + if ".." 
in path or "~" in path: + msg = "Path traversal not allowed" + raise ValueError(msg) + + # Convert virtual path to filesystem path + relative = path.lstrip("/") + full_path = (self.root_path / relative).resolve() + + # Ensure path is within root + try: + full_path.relative_to(self.root_path) + except ValueError: + msg = f"Path outside root directory: {path}" + raise ValueError(msg) from None + + return full_path + + def _ripgrep_search( + self, pattern: str, base_path: str, include: str | None + ) -> dict[str, list[tuple[int, str]]]: + """Search using ripgrep subprocess.""" + try: + base_full = self._validate_and_resolve_path(base_path) + except ValueError: + return {} + + if not base_full.exists(): + return {} + + # Build ripgrep command + cmd = ["rg", "--json"] + + if include: + # Convert glob pattern to ripgrep glob + cmd.extend(["--glob", include]) + + cmd.extend(["--", pattern, str(base_full)]) + + try: + result = subprocess.run( # noqa: S603 + cmd, + capture_output=True, + text=True, + timeout=30, + check=False, + ) + except (subprocess.TimeoutExpired, FileNotFoundError): + # Fallback to Python search if ripgrep unavailable or times out + return self._python_search(pattern, base_path, include) + + # Parse ripgrep JSON output + results: dict[str, list[tuple[int, str]]] = {} + for line in result.stdout.splitlines(): + try: + data = json.loads(line) + if data["type"] == "match": + path = data["data"]["path"]["text"] + # Convert to virtual path + virtual_path = "/" + str(Path(path).relative_to(self.root_path)) + line_num = data["data"]["line_number"] + line_text = data["data"]["lines"]["text"].rstrip("\n") + + if virtual_path not in results: + results[virtual_path] = [] + results[virtual_path].append((line_num, line_text)) + except (json.JSONDecodeError, KeyError): + continue + + return results + + def _python_search( + self, pattern: str, base_path: str, include: str | None + ) -> dict[str, list[tuple[int, str]]]: + """Search using Python regex (fallback).""" + try: + base_full = self._validate_and_resolve_path(base_path) + except ValueError: + return {} + + if not base_full.exists(): + return {} + + regex = re.compile(pattern) + results: dict[str, list[tuple[int, str]]] = {} + + # Walk directory tree + for file_path in base_full.rglob("*"): + if not file_path.is_file(): + continue + + # Check include filter + if include and not _match_include_pattern(file_path.name, include): + continue + + # Skip files that are too large + if file_path.stat().st_size > self.max_file_size_bytes: + continue + + try: + content = file_path.read_text() + except (UnicodeDecodeError, PermissionError): + continue + + # Search content + for line_num, line in enumerate(content.splitlines(), 1): + if regex.search(line): + virtual_path = "/" + str(file_path.relative_to(self.root_path)) + if virtual_path not in results: + results[virtual_path] = [] + results[virtual_path].append((line_num, line)) + + return results + + def _format_grep_results( + self, + results: dict[str, list[tuple[int, str]]], + output_mode: str, + ) -> str: + """Format grep results based on output mode.""" + if output_mode == "files_with_matches": + # Just return file paths + return "\n".join(sorted(results.keys())) + + if output_mode == "content": + # Return file:line:content format + lines = [] + for file_path in sorted(results.keys()): + for line_num, line in results[file_path]: + lines.append(f"{file_path}:{line_num}:{line}") + return "\n".join(lines) + + if output_mode == "count": + # Return file:count format + lines = [] + for file_path 
in sorted(results.keys()): + count = len(results[file_path]) + lines.append(f"{file_path}:{count}") + return "\n".join(lines) + + # Default to files_with_matches + return "\n".join(sorted(results.keys())) + + +__all__ = [ + "FilesystemFileSearchMiddleware", +] diff --git a/libs/langchain_v1/langchain/agents/middleware/human_in_the_loop.py b/libs/langchain_v1/langchain/agents/middleware/human_in_the_loop.py index 071a8d9cf4a..cc1a4f2df3f 100644 --- a/libs/langchain_v1/langchain/agents/middleware/human_in_the_loop.py +++ b/libs/langchain_v1/langchain/agents/middleware/human_in_the_loop.py @@ -10,89 +10,93 @@ from typing_extensions import NotRequired, TypedDict from langchain.agents.middleware.types import AgentMiddleware, AgentState -class HumanInTheLoopConfig(TypedDict): - """Configuration that defines what actions are allowed for a human interrupt. +class Action(TypedDict): + """Represents an action with a name and args.""" - This controls the available interaction options when the graph is paused for human input. - """ + name: str + """The type or name of action being requested (e.g., "add_numbers").""" - allow_accept: NotRequired[bool] - """Whether the human can approve the current action without changes.""" - allow_edit: NotRequired[bool] - """Whether the human can approve the current action with edited content.""" - allow_respond: NotRequired[bool] - """Whether the human can reject the current action with feedback.""" + args: dict[str, Any] + """Key-value pairs of args needed for the action (e.g., {"a": 1, "b": 2}).""" class ActionRequest(TypedDict): - """Represents a request with a name and arguments.""" + """Represents an action request with a name, args, and description.""" - action: str - """The type or name of action being requested (e.g., "add_numbers").""" - args: dict - """Key-value pairs of arguments needed for the action (e.g., {"a": 1, "b": 2}).""" + name: str + """The name of the action being requested.""" + + args: dict[str, Any] + """Key-value pairs of args needed for the action (e.g., {"a": 1, "b": 2}).""" + + description: NotRequired[str] + """The description of the action to be reviewed.""" -class HumanInTheLoopRequest(TypedDict): - """Represents an interrupt triggered by the graph that requires human intervention. 
- - Example: - ```python - # Extract a tool call from the state and create an interrupt request - request = HumanInterrupt( - action_request=ActionRequest( - action="run_command", # The action being requested - args={"command": "ls", "args": ["-l"]}, # Arguments for the action - ), - config=HumanInTheLoopConfig( - allow_accept=True, # Allow approval - allow_respond=True, # Allow rejection with feedback - allow_edit=False, # Don't allow approval with edits - ), - description="Please review the command before execution", - ) - # Send the interrupt request and get the response - response = interrupt([request])[0] - ``` - """ - - action_request: ActionRequest - """The specific action being requested from the human.""" - config: HumanInTheLoopConfig - """Configuration defining what response types are allowed.""" - description: str | None - """Optional detailed description of what input is needed.""" +DecisionType = Literal["approve", "edit", "reject"] -class AcceptPayload(TypedDict): +class ReviewConfig(TypedDict): + """Policy for reviewing a HITL request.""" + + action_name: str + """Name of the action associated with this review configuration.""" + + allowed_decisions: list[DecisionType] + """The decisions that are allowed for this request.""" + + args_schema: NotRequired[dict[str, Any]] + """JSON schema for the args associated with the action, if edits are allowed.""" + + +class HITLRequest(TypedDict): + """Request for human feedback on a sequence of actions requested by a model.""" + + action_requests: list[ActionRequest] + """A list of agent actions for human review.""" + + review_configs: list[ReviewConfig] + """Review configuration for all possible actions.""" + + +class ApproveDecision(TypedDict): """Response when a human approves the action.""" - type: Literal["accept"] + type: Literal["approve"] """The type of response when a human approves the action.""" -class ResponsePayload(TypedDict): - """Response when a human rejects the action.""" - - type: Literal["response"] - """The type of response when a human rejects the action.""" - - args: NotRequired[str] - """The message to be sent to the model explaining why the action was rejected.""" - - -class EditPayload(TypedDict): +class EditDecision(TypedDict): """Response when a human edits the action.""" type: Literal["edit"] """The type of response when a human edits the action.""" - args: ActionRequest - """The action request with the edited content.""" + edited_action: Action + """Edited action for the agent to perform. + + Ex: for a tool call, a human reviewer can edit the tool name and args. + """ -HumanInTheLoopResponse = AcceptPayload | ResponsePayload | EditPayload -"""Aggregated response type for all possible human in the loop responses.""" +class RejectDecision(TypedDict): + """Response when a human rejects the action.""" + + type: Literal["reject"] + """The type of response when a human rejects the action.""" + + message: NotRequired[str] + """The message sent to the model explaining why the action was rejected.""" + + +Decision = ApproveDecision | EditDecision | RejectDecision + + +class HITLResponse(TypedDict): + """Response payload for a HITLRequest.""" + + decisions: list[Decision] + """The decisions made by the human.""" class _DescriptionFactory(Protocol): @@ -103,19 +107,21 @@ class _DescriptionFactory(Protocol): ... -class ToolConfig(TypedDict): - """Configuration for a tool requiring human in the loop.""" +class InterruptOnConfig(TypedDict): + """Configuration for an action requiring human in the loop. 
+ + This is the configuration format used in the `HumanInTheLoopMiddleware.__init__` + method. + """ + + allowed_decisions: list[DecisionType] + """The decisions that are allowed for this action.""" - allow_accept: NotRequired[bool] - """Whether the human can approve the current action without changes.""" - allow_edit: NotRequired[bool] - """Whether the human can approve the current action with edited content.""" - allow_respond: NotRequired[bool] - """Whether the human can reject the current action with feedback.""" description: NotRequired[str | _DescriptionFactory] """The description attached to the request for human input. Can be either: + - A static string describing the approval request - A callable that dynamically generates the description based on agent state, runtime, and tool call information @@ -124,7 +130,7 @@ class ToolConfig(TypedDict): ```python # Static string description config = ToolConfig( - allow_accept=True, + allowed_decisions=["approve", "reject"], description="Please review this tool execution" ) @@ -140,12 +146,14 @@ class ToolConfig(TypedDict): f"Arguments:\\n{json.dumps(tool_call['args'], indent=2)}" ) - config = ToolConfig( - allow_accept=True, + config = InterruptOnConfig( + allowed_decisions=["approve", "edit", "reject"], description=format_tool_description ) ``` """ + args_schema: NotRequired[dict[str, Any]] + """JSON schema for the args associated with the action, if edits are allowed.""" class HumanInTheLoopMiddleware(AgentMiddleware): @@ -153,7 +161,7 @@ class HumanInTheLoopMiddleware(AgentMiddleware): def __init__( self, - interrupt_on: dict[str, bool | ToolConfig], + interrupt_on: dict[str, bool | InterruptOnConfig], *, description_prefix: str = "Tool execution requires approval", ) -> None: @@ -163,34 +171,110 @@ class HumanInTheLoopMiddleware(AgentMiddleware): interrupt_on: Mapping of tool name to allowed actions. If a tool doesn't have an entry, it's auto-approved by default. - * `True` indicates all actions are allowed: accept, edit, and respond. + * `True` indicates all decisions are allowed: approve, edit, and reject. * `False` indicates that the tool is auto-approved. - * `ToolConfig` indicates the specific actions allowed for this tool. - The ToolConfig can include a `description` field (str or callable) for - custom formatting of the interrupt description. + * `InterruptOnConfig` indicates the specific decisions allowed for this + tool. + The InterruptOnConfig can include a `description` field (`str` or + `Callable`) for custom formatting of the interrupt description. description_prefix: The prefix to use when constructing action requests. - This is used to provide context about the tool call and the action being requested. - Not used if a tool has a `description` in its ToolConfig. + This is used to provide context about the tool call and the action being + requested. Not used if a tool has a `description` in its + `InterruptOnConfig`. 
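A hedged sketch of the new configuration shape follows; the tool names are hypothetical, the module path is taken from the file being modified, and the resume payload mirrors the `HITLResponse` shape defined above.

```python
from langchain.agents.middleware.human_in_the_loop import HumanInTheLoopMiddleware

hitl = HumanInTheLoopMiddleware(
    interrupt_on={
        # Hypothetical tool names, shown only to illustrate the two accepted value types.
        "run_command": {"allowed_decisions": ["approve", "edit", "reject"]},
        "read_file": False,  # auto-approved, never interrupts
    },
)

# When execution is interrupted, the graph is resumed with one decision per
# interrupted tool call, e.g.:
resume_payload = {"decisions": [{"type": "approve"}]}
```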
""" super().__init__() - resolved_tool_configs: dict[str, ToolConfig] = {} + resolved_configs: dict[str, InterruptOnConfig] = {} for tool_name, tool_config in interrupt_on.items(): if isinstance(tool_config, bool): if tool_config is True: - resolved_tool_configs[tool_name] = ToolConfig( - allow_accept=True, - allow_edit=True, - allow_respond=True, + resolved_configs[tool_name] = InterruptOnConfig( + allowed_decisions=["approve", "edit", "reject"] ) - elif any( - tool_config.get(x, False) for x in ["allow_accept", "allow_edit", "allow_respond"] - ): - resolved_tool_configs[tool_name] = tool_config - self.interrupt_on = resolved_tool_configs + elif tool_config.get("allowed_decisions"): + resolved_configs[tool_name] = tool_config + self.interrupt_on = resolved_configs self.description_prefix = description_prefix + def _create_action_and_config( + self, + tool_call: ToolCall, + config: InterruptOnConfig, + state: AgentState, + runtime: Runtime, + ) -> tuple[ActionRequest, ReviewConfig]: + """Create an ActionRequest and ReviewConfig for a tool call.""" + tool_name = tool_call["name"] + tool_args = tool_call["args"] + + # Generate description using the description field (str or callable) + description_value = config.get("description") + if callable(description_value): + description = description_value(tool_call, state, runtime) + elif description_value is not None: + description = description_value + else: + description = f"{self.description_prefix}\n\nTool: {tool_name}\nArgs: {tool_args}" + + # Create ActionRequest with description + action_request = ActionRequest( + name=tool_name, + args=tool_args, + description=description, + ) + + # Create ReviewConfig + # eventually can get tool information and populate args_schema from there + review_config = ReviewConfig( + action_name=tool_name, + allowed_decisions=config["allowed_decisions"], + ) + + return action_request, review_config + + def _process_decision( + self, + decision: Decision, + tool_call: ToolCall, + config: InterruptOnConfig, + ) -> tuple[ToolCall | None, ToolMessage | None]: + """Process a single decision and return the revised tool call and optional tool message.""" + allowed_decisions = config["allowed_decisions"] + + if decision["type"] == "approve" and "approve" in allowed_decisions: + return tool_call, None + if decision["type"] == "edit" and "edit" in allowed_decisions: + edited_action = decision["edited_action"] + return ( + ToolCall( + type="tool_call", + name=edited_action["name"], + args=edited_action["args"], + id=tool_call["id"], + ), + None, + ) + if decision["type"] == "reject" and "reject" in allowed_decisions: + # Create a tool message with the human's text response + content = decision.get("message") or ( + f"User rejected the tool call for `{tool_call['name']}` with id {tool_call['id']}" + ) + tool_message = ToolMessage( + content=content, + name=tool_call["name"], + tool_call_id=tool_call["id"], + status="error", + ) + return tool_call, tool_message + msg = ( + f"Unexpected human decision: {decision}. " + f"Decision type '{decision.get('type')}' " + f"is not allowed for tool '{tool_call['name']}'. " + f"Expected one of {allowed_decisions} based on the tool's configuration." 
+ ) + raise ValueError(msg) + def after_model(self, state: AgentState, runtime: Runtime) -> dict[str, Any] | None: - """Trigger interrupt flows for relevant tool calls after an AIMessage.""" + """Trigger interrupt flows for relevant tool calls after an `AIMessage`.""" messages = state["messages"] if not messages: return None @@ -216,87 +300,50 @@ class HumanInTheLoopMiddleware(AgentMiddleware): revised_tool_calls: list[ToolCall] = auto_approved_tool_calls.copy() artificial_tool_messages: list[ToolMessage] = [] - # Create interrupt requests for all tools that need approval - interrupt_requests: list[HumanInTheLoopRequest] = [] + # Create action requests and review configs for all tools that need approval + action_requests: list[ActionRequest] = [] + review_configs: list[ReviewConfig] = [] + for tool_call in interrupt_tool_calls: - tool_name = tool_call["name"] - tool_args = tool_call["args"] - config = self.interrupt_on[tool_name] + config = self.interrupt_on[tool_call["name"]] - # Generate description using the description field (str or callable) - description_value = config.get("description") - if callable(description_value): - description = description_value(tool_call, state, runtime) - elif description_value is not None: - description = description_value - else: - description = f"{self.description_prefix}\n\nTool: {tool_name}\nArgs: {tool_args}" + # Create ActionRequest and ReviewConfig using helper method + action_request, review_config = self._create_action_and_config( + tool_call, config, state, runtime + ) + action_requests.append(action_request) + review_configs.append(review_config) - request: HumanInTheLoopRequest = { - "action_request": ActionRequest( - action=tool_name, - args=tool_args, - ), - "config": config, - "description": description, - } - interrupt_requests.append(request) + # Create single HITLRequest with all actions and configs + hitl_request = HITLRequest( + action_requests=action_requests, + review_configs=review_configs, + ) - responses: list[HumanInTheLoopResponse] = interrupt(interrupt_requests) + # Send interrupt and get response + hitl_response: HITLResponse = interrupt(hitl_request) + decisions = hitl_response["decisions"] - # Validate that the number of responses matches the number of interrupt tool calls - if (responses_len := len(responses)) != ( + # Validate that the number of decisions matches the number of interrupt tool calls + if (decisions_len := len(decisions)) != ( interrupt_tool_calls_len := len(interrupt_tool_calls) ): msg = ( - f"Number of human responses ({responses_len}) does not match " + f"Number of human decisions ({decisions_len}) does not match " f"number of hanging tool calls ({interrupt_tool_calls_len})." 
) raise ValueError(msg) - for i, response in enumerate(responses): + # Process each decision using helper method + for i, decision in enumerate(decisions): tool_call = interrupt_tool_calls[i] config = self.interrupt_on[tool_call["name"]] - if response["type"] == "accept" and config.get("allow_accept"): - revised_tool_calls.append(tool_call) - elif response["type"] == "edit" and config.get("allow_edit"): - edited_action = response["args"] - revised_tool_calls.append( - ToolCall( - type="tool_call", - name=edited_action["action"], - args=edited_action["args"], - id=tool_call["id"], - ) - ) - elif response["type"] == "response" and config.get("allow_respond"): - # Create a tool message with the human's text response - content = response.get("args") or ( - f"User rejected the tool call for `{tool_call['name']}` " - f"with id {tool_call['id']}" - ) - tool_message = ToolMessage( - content=content, - name=tool_call["name"], - tool_call_id=tool_call["id"], - status="error", - ) - revised_tool_calls.append(tool_call) + revised_tool_call, tool_message = self._process_decision(decision, tool_call, config) + if revised_tool_call: + revised_tool_calls.append(revised_tool_call) + if tool_message: artificial_tool_messages.append(tool_message) - else: - allowed_actions = [ - action - for action in ["accept", "edit", "response"] - if config.get(f"allow_{'respond' if action == 'response' else action}") - ] - msg = ( - f"Unexpected human response: {response}. " - f"Response action '{response.get('type')}' " - f"is not allowed for tool '{tool_call['name']}'. " - f"Expected one of {allowed_actions} based on the tool's configuration." - ) - raise ValueError(msg) # Update the AI message to only include approved tool calls last_ai_msg.tool_calls = revised_tool_calls diff --git a/libs/langchain_v1/langchain/agents/middleware/model_call_limit.py b/libs/langchain_v1/langchain/agents/middleware/model_call_limit.py index 63f0be50fda..90dba99dfa3 100644 --- a/libs/langchain_v1/langchain/agents/middleware/model_call_limit.py +++ b/libs/langchain_v1/langchain/agents/middleware/model_call_limit.py @@ -2,16 +2,33 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Literal +from typing import TYPE_CHECKING, Annotated, Any, Literal from langchain_core.messages import AIMessage +from langgraph.channels.untracked_value import UntrackedValue +from typing_extensions import NotRequired -from langchain.agents.middleware.types import AgentMiddleware, AgentState, hook_config +from langchain.agents.middleware.types import ( + AgentMiddleware, + AgentState, + PrivateStateAttr, + hook_config, +) if TYPE_CHECKING: from langgraph.runtime import Runtime +class ModelCallLimitState(AgentState): + """State schema for ModelCallLimitMiddleware. + + Extends AgentState with model call tracking fields. + """ + + thread_model_call_count: NotRequired[Annotated[int, PrivateStateAttr]] + run_model_call_count: NotRequired[Annotated[int, UntrackedValue, PrivateStateAttr]] + + def _build_limit_exceeded_message( thread_count: int, run_count: int, @@ -69,8 +86,8 @@ class ModelCallLimitExceededError(Exception): super().__init__(msg) -class ModelCallLimitMiddleware(AgentMiddleware): - """Middleware that tracks model call counts and enforces limits. +class ModelCallLimitMiddleware(AgentMiddleware[ModelCallLimitState, Any]): + """Tracks model call counts and enforces limits. This middleware monitors the number of model calls made during agent execution and can terminate the agent when specified limits are reached. 
It supports @@ -97,6 +114,8 @@ class ModelCallLimitMiddleware(AgentMiddleware): ``` """ + state_schema = ModelCallLimitState + def __init__( self, *, @@ -108,17 +127,16 @@ class ModelCallLimitMiddleware(AgentMiddleware): Args: thread_limit: Maximum number of model calls allowed per thread. - None means no limit. Defaults to `None`. + None means no limit. run_limit: Maximum number of model calls allowed per run. - None means no limit. Defaults to `None`. + None means no limit. exit_behavior: What to do when limits are exceeded. - "end": Jump to the end of the agent execution and inject an artificial AI message indicating that the limit was exceeded. - - "error": Raise a ModelCallLimitExceededError - Defaults to "end". + - "error": Raise a `ModelCallLimitExceededError` Raises: - ValueError: If both limits are None or if exit_behavior is invalid. + ValueError: If both limits are `None` or if `exit_behavior` is invalid. """ super().__init__() @@ -135,7 +153,7 @@ class ModelCallLimitMiddleware(AgentMiddleware): self.exit_behavior = exit_behavior @hook_config(can_jump_to=["end"]) - def before_model(self, state: AgentState, runtime: Runtime) -> dict[str, Any] | None: # noqa: ARG002 + def before_model(self, state: ModelCallLimitState, runtime: Runtime) -> dict[str, Any] | None: # noqa: ARG002 """Check model call limits before making a model call. Args: @@ -175,3 +193,18 @@ class ModelCallLimitMiddleware(AgentMiddleware): return {"jump_to": "end", "messages": [limit_ai_message]} return None + + def after_model(self, state: ModelCallLimitState, runtime: Runtime) -> dict[str, Any] | None: # noqa: ARG002 + """Increment model call counts after a model call. + + Args: + state: The current agent state. + runtime: The langgraph runtime. + + Returns: + State updates with incremented call counts. 
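With this change the counters live in middleware state and are incremented in `after_model`; a minimal construction sketch (arbitrary limit values, module path taken from this diff):

```python
from langchain.agents.middleware.model_call_limit import ModelCallLimitMiddleware

# Cap a conversation thread at 20 model calls and a single run at 5;
# "end" injects an explanatory AIMessage instead of raising an error.
limiter = ModelCallLimitMiddleware(thread_limit=20, run_limit=5, exit_behavior="end")
```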
+ """ + return { + "thread_model_call_count": state.get("thread_model_call_count", 0) + 1, + "run_model_call_count": state.get("run_model_call_count", 0) + 1, + } diff --git a/libs/langchain_v1/langchain/agents/middleware/model_fallback.py b/libs/langchain_v1/langchain/agents/middleware/model_fallback.py index 048eb8edb88..6ac85e57751 100644 --- a/libs/langchain_v1/langchain/agents/middleware/model_fallback.py +++ b/libs/langchain_v1/langchain/agents/middleware/model_fallback.py @@ -6,15 +6,16 @@ from typing import TYPE_CHECKING from langchain.agents.middleware.types import ( AgentMiddleware, + ModelCallResult, ModelRequest, + ModelResponse, ) from langchain.chat_models import init_chat_model if TYPE_CHECKING: - from collections.abc import Callable + from collections.abc import Awaitable, Callable from langchain_core.language_models.chat_models import BaseChatModel - from langchain_core.messages import AIMessage class ModelFallbackMiddleware(AgentMiddleware): @@ -30,7 +31,7 @@ class ModelFallbackMiddleware(AgentMiddleware): fallback = ModelFallbackMiddleware( "openai:gpt-4o-mini", # Try first on error - "anthropic:claude-3-5-sonnet-20241022", # Then this + "anthropic:claude-sonnet-4-5-20250929", # Then this ) agent = create_agent( @@ -38,7 +39,7 @@ class ModelFallbackMiddleware(AgentMiddleware): middleware=[fallback], ) - # If primary fails: tries gpt-4o-mini, then claude-3-5-sonnet + # If primary fails: tries gpt-4o-mini, then claude-sonnet-4-5-20250929 result = await agent.invoke({"messages": [HumanMessage("Hello")]}) ``` """ @@ -68,14 +69,12 @@ class ModelFallbackMiddleware(AgentMiddleware): def wrap_model_call( self, request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: """Try fallback models in sequence on errors. Args: request: Initial model request. - state: Current agent state. - runtime: LangGraph runtime. handler: Callback to execute the model. Returns: @@ -101,3 +100,38 @@ class ModelFallbackMiddleware(AgentMiddleware): continue raise last_exception + + async def awrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: + """Try fallback models in sequence on errors (async version). + + Args: + request: Initial model request. + handler: Async callback to execute the model. + + Returns: + AIMessage from successful model call. + + Raises: + Exception: If all models fail, re-raises last exception. 
+ """ + # Try primary model first + last_exception: Exception + try: + return await handler(request) + except Exception as e: # noqa: BLE001 + last_exception = e + + # Try fallback models + for fallback_model in self.models: + request.model = fallback_model + try: + return await handler(request) + except Exception as e: # noqa: BLE001 + last_exception = e + continue + + raise last_exception diff --git a/libs/langchain_v1/langchain/agents/middleware/pii.py b/libs/langchain_v1/langchain/agents/middleware/pii.py index 00e28bfe26b..4ca139174a3 100644 --- a/libs/langchain_v1/langchain/agents/middleware/pii.py +++ b/libs/langchain_v1/langchain/agents/middleware/pii.py @@ -2,15 +2,22 @@ from __future__ import annotations -import hashlib -import ipaddress -import re from typing import TYPE_CHECKING, Any, Literal -from urllib.parse import urlparse from langchain_core.messages import AIMessage, AnyMessage, HumanMessage, ToolMessage -from typing_extensions import TypedDict +from langchain.agents.middleware._redaction import ( + PIIDetectionError, + PIIMatch, + RedactionRule, + ResolvedRedactionRule, + apply_strategy, + detect_credit_card, + detect_email, + detect_ip, + detect_mac_address, + detect_url, +) from langchain.agents.middleware.types import AgentMiddleware, AgentState, hook_config if TYPE_CHECKING: @@ -19,396 +26,6 @@ if TYPE_CHECKING: from langgraph.runtime import Runtime -class PIIMatch(TypedDict): - """Represents a detected PII match in text.""" - - type: str - """The type of PII detected (e.g., 'email', 'ssn', 'credit_card').""" - value: str - """The actual matched text.""" - start: int - """Starting position of the match in the text.""" - end: int - """Ending position of the match in the text.""" - - -class PIIDetectionError(Exception): - """Exception raised when PII is detected and strategy is 'block'.""" - - def __init__(self, pii_type: str, matches: list[PIIMatch]) -> None: - """Initialize the exception with PII detection information. - - Args: - pii_type: The type of PII that was detected. - matches: List of PII matches found. - """ - self.pii_type = pii_type - self.matches = matches - count = len(matches) - msg = f"Detected {count} instance(s) of {pii_type} in message content" - super().__init__(msg) - - -# ============================================================================ -# PII Detection Functions -# ============================================================================ - - -def _luhn_checksum(card_number: str) -> bool: - """Validate credit card number using Luhn algorithm. - - Args: - card_number: Credit card number string (digits only). - - Returns: - True if the number passes Luhn validation, False otherwise. - """ - digits = [int(d) for d in card_number if d.isdigit()] - - if len(digits) < 13 or len(digits) > 19: - return False - - checksum = 0 - for i, digit in enumerate(reversed(digits)): - d = digit - if i % 2 == 1: - d *= 2 - if d > 9: - d -= 9 - checksum += d - - return checksum % 10 == 0 - - -def detect_email(content: str) -> list[PIIMatch]: - """Detect email addresses in content. - - Args: - content: Text content to scan. - - Returns: - List of detected email matches. - """ - pattern = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b" - return [ - PIIMatch( - type="email", - value=match.group(), - start=match.start(), - end=match.end(), - ) - for match in re.finditer(pattern, content) - ] - - -def detect_credit_card(content: str) -> list[PIIMatch]: - """Detect credit card numbers in content using Luhn validation. 
- - Detects cards in formats like: - - 1234567890123456 - - 1234 5678 9012 3456 - - 1234-5678-9012-3456 - - Args: - content: Text content to scan. - - Returns: - List of detected credit card matches. - """ - # Match various credit card formats - pattern = r"\b\d{4}[\s-]?\d{4}[\s-]?\d{4}[\s-]?\d{4}\b" - matches = [] - - for match in re.finditer(pattern, content): - card_number = match.group() - # Validate with Luhn algorithm - if _luhn_checksum(card_number): - matches.append( - PIIMatch( - type="credit_card", - value=card_number, - start=match.start(), - end=match.end(), - ) - ) - - return matches - - -def detect_ip(content: str) -> list[PIIMatch]: - """Detect IP addresses in content using stdlib validation. - - Validates both IPv4 and IPv6 addresses. - - Args: - content: Text content to scan. - - Returns: - List of detected IP address matches. - """ - matches = [] - - # IPv4 pattern - ipv4_pattern = r"\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b" - - for match in re.finditer(ipv4_pattern, content): - ip_str = match.group() - try: - # Validate with stdlib - ipaddress.ip_address(ip_str) - matches.append( - PIIMatch( - type="ip", - value=ip_str, - start=match.start(), - end=match.end(), - ) - ) - except ValueError: - # Not a valid IP address - pass - - return matches - - -def detect_mac_address(content: str) -> list[PIIMatch]: - """Detect MAC addresses in content. - - Detects formats like: - - 00:1A:2B:3C:4D:5E - - 00-1A-2B-3C-4D-5E - - Args: - content: Text content to scan. - - Returns: - List of detected MAC address matches. - """ - pattern = r"\b([0-9A-Fa-f]{2}[:-]){5}[0-9A-Fa-f]{2}\b" - return [ - PIIMatch( - type="mac_address", - value=match.group(), - start=match.start(), - end=match.end(), - ) - for match in re.finditer(pattern, content) - ] - - -def detect_url(content: str) -> list[PIIMatch]: - """Detect URLs in content using regex and stdlib validation. - - Detects: - - http://example.com - - https://example.com/path - - www.example.com - - example.com/path - - Args: - content: Text content to scan. - - Returns: - List of detected URL matches. - """ - matches = [] - - # Pattern 1: URLs with scheme (http:// or https://) - scheme_pattern = r"https?://[^\s<>\"{}|\\^`\[\]]+" - - for match in re.finditer(scheme_pattern, content): - url = match.group() - try: - result = urlparse(url) - if result.scheme in ("http", "https") and result.netloc: - matches.append( - PIIMatch( - type="url", - value=url, - start=match.start(), - end=match.end(), - ) - ) - except Exception: # noqa: S110, BLE001 - # Invalid URL, skip - pass - - # Pattern 2: URLs without scheme (www.example.com or example.com/path) - # More conservative to avoid false positives - bare_pattern = r"\b(?:www\.)?[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?:/[^\s]*)?" # noqa: E501 - - for match in re.finditer(bare_pattern, content): - # Skip if already matched with scheme - if any( - m["start"] <= match.start() < m["end"] or m["start"] < match.end() <= m["end"] - for m in matches - ): - continue - - url = match.group() - # Only accept if it has a path or starts with www - # This reduces false positives like "example.com" in prose - if "/" in url or url.startswith("www."): - try: - # Add scheme for validation (required for urlparse to work correctly) - test_url = f"http://{url}" - result = urlparse(test_url) - if result.netloc and "." 
in result.netloc: - matches.append( - PIIMatch( - type="url", - value=url, - start=match.start(), - end=match.end(), - ) - ) - except Exception: # noqa: S110, BLE001 - # Invalid URL, skip - pass - - return matches - - -# Built-in detector registry -_BUILTIN_DETECTORS: dict[str, Callable[[str], list[PIIMatch]]] = { - "email": detect_email, - "credit_card": detect_credit_card, - "ip": detect_ip, - "mac_address": detect_mac_address, - "url": detect_url, -} - - -# ============================================================================ -# Strategy Implementations -# ============================================================================ - - -def _apply_redact_strategy(content: str, matches: list[PIIMatch]) -> str: - """Replace PII with [REDACTED_TYPE] placeholders. - - Args: - content: Original content. - matches: List of PII matches to redact. - - Returns: - Content with PII redacted. - """ - if not matches: - return content - - # Sort matches by start position in reverse to avoid offset issues - sorted_matches = sorted(matches, key=lambda m: m["start"], reverse=True) - - result = content - for match in sorted_matches: - replacement = f"[REDACTED_{match['type'].upper()}]" - result = result[: match["start"]] + replacement + result[match["end"] :] - - return result - - -def _apply_mask_strategy(content: str, matches: list[PIIMatch]) -> str: - """Partially mask PII, showing only last few characters. - - Args: - content: Original content. - matches: List of PII matches to mask. - - Returns: - Content with PII masked. - """ - if not matches: - return content - - # Sort matches by start position in reverse - sorted_matches = sorted(matches, key=lambda m: m["start"], reverse=True) - - result = content - for match in sorted_matches: - value = match["value"] - pii_type = match["type"] - - # Different masking strategies by type - if pii_type == "email": - # Show only domain: user@****.com - parts = value.split("@") - if len(parts) == 2: - domain_parts = parts[1].split(".") - if len(domain_parts) >= 2: - masked = f"{parts[0]}@****.{domain_parts[-1]}" - else: - masked = f"{parts[0]}@****" - else: - masked = "****" - - elif pii_type == "credit_card": - # Show last 4: ****-****-****-1234 - digits_only = "".join(c for c in value if c.isdigit()) - separator = "-" if "-" in value else " " if " " in value else "" - if separator: - masked = f"****{separator}****{separator}****{separator}{digits_only[-4:]}" - else: - masked = f"************{digits_only[-4:]}" - - elif pii_type == "ip": - # Show last octet: *.*.*. 123 - parts = value.split(".") - masked = f"*.*.*.{parts[-1]}" if len(parts) == 4 else "****" - - elif pii_type == "mac_address": - # Show last byte: **:**:**:**:**:5E - separator = ":" if ":" in value else "-" - masked = ( - f"**{separator}**{separator}**{separator}**{separator}**{separator}{value[-2:]}" - ) - - elif pii_type == "url": - # Mask everything: [MASKED_URL] - masked = "[MASKED_URL]" - - else: - # Default: show last 4 chars - masked = f"****{value[-4:]}" if len(value) > 4 else "****" - - result = result[: match["start"]] + masked + result[match["end"] :] - - return result - - -def _apply_hash_strategy(content: str, matches: list[PIIMatch]) -> str: - """Replace PII with deterministic hash including type information. - - Args: - content: Original content. - matches: List of PII matches to hash. - - Returns: - Content with PII replaced by hashes in format . 
- """ - if not matches: - return content - - # Sort matches by start position in reverse - sorted_matches = sorted(matches, key=lambda m: m["start"], reverse=True) - - result = content - for match in sorted_matches: - value = match["value"] - pii_type = match["type"] - # Create deterministic hash - hash_digest = hashlib.sha256(value.encode()).hexdigest()[:8] - replacement = f"<{pii_type}_hash:{hash_digest}>" - result = result[: match["start"]] + replacement + result[match["end"] :] - - return result - - -# ============================================================================ -# PIIMiddleware -# ============================================================================ - - class PIIMiddleware(AgentMiddleware): """Detect and handle Personally Identifiable Information (PII) in agent conversations. @@ -421,7 +38,7 @@ class PIIMiddleware(AgentMiddleware): - `credit_card`: Credit card numbers (validated with Luhn algorithm) - `ip`: IP addresses (validated with stdlib) - `mac_address`: MAC addresses - - `url`: URLs (both http/https and bare URLs) + - `url`: URLs (both `http`/`https` and bare URLs) Strategies: - `block`: Raise an exception when PII is detected @@ -431,14 +48,12 @@ class PIIMiddleware(AgentMiddleware): Strategy Selection Guide: - ======== =================== ======================================= - Strategy Preserves Identity? Best For - ======== =================== ======================================= - `block` N/A Avoid PII completely - `redact` No General compliance, log sanitization - `mask` No Human readability, customer service UIs - `hash` Yes (pseudonymous) Analytics, debugging - ======== =================== ======================================= + | Strategy | Preserves Identity? | Best For | + | -------- | ------------------- | --------------------------------------- | + | `block` | N/A | Avoid PII completely | + | `redact` | No | General compliance, log sanitization | + | `mask` | No | Human readability, customer service UIs | + | `hash` | Yes (pseudonymous) | Analytics, debugging | Example: ```python @@ -512,50 +127,34 @@ class PIIMiddleware(AgentMiddleware): """ super().__init__() - self.pii_type = pii_type - self.strategy = strategy self.apply_to_input = apply_to_input self.apply_to_output = apply_to_output self.apply_to_tool_results = apply_to_tool_results - # Resolve detector - if detector is None: - # Use built-in detector - if pii_type not in _BUILTIN_DETECTORS: - msg = ( - f"Unknown PII type: {pii_type}. " - f"Must be one of {list(_BUILTIN_DETECTORS.keys())} " - "or provide a custom detector." 
- ) - raise ValueError(msg) - self.detector = _BUILTIN_DETECTORS[pii_type] - elif isinstance(detector, str): - # Custom regex pattern - pattern = detector - - def regex_detector(content: str) -> list[PIIMatch]: - return [ - PIIMatch( - type=pii_type, - value=match.group(), - start=match.start(), - end=match.end(), - ) - for match in re.finditer(pattern, content) - ] - - self.detector = regex_detector - else: - # Custom callable detector - self.detector = detector + self._resolved_rule: ResolvedRedactionRule = RedactionRule( + pii_type=pii_type, + strategy=strategy, + detector=detector, + ).resolve() + self.pii_type = self._resolved_rule.pii_type + self.strategy = self._resolved_rule.strategy + self.detector = self._resolved_rule.detector @property def name(self) -> str: """Name of the middleware.""" return f"{self.__class__.__name__}[{self.pii_type}]" + def _process_content(self, content: str) -> tuple[str, list[PIIMatch]]: + """Apply the configured redaction rule to the provided content.""" + matches = self.detector(content) + if not matches: + return content, [] + sanitized = apply_strategy(content, matches, self.strategy) + return sanitized, matches + @hook_config(can_jump_to=["end"]) - def before_model( # noqa: PLR0915 + def before_model( self, state: AgentState, runtime: Runtime, # noqa: ARG002 @@ -596,25 +195,9 @@ class PIIMiddleware(AgentMiddleware): if last_user_idx is not None and last_user_msg and last_user_msg.content: # Detect PII in message content content = str(last_user_msg.content) - matches = self.detector(content) + new_content, matches = self._process_content(content) if matches: - # Apply strategy - if self.strategy == "block": - raise PIIDetectionError(self.pii_type, matches) - - if self.strategy == "redact": - new_content = _apply_redact_strategy(content, matches) - elif self.strategy == "mask": - new_content = _apply_mask_strategy(content, matches) - elif self.strategy == "hash": - new_content = _apply_hash_strategy(content, matches) - else: - # Should not reach here due to type hints - msg = f"Unknown strategy: {self.strategy}" - raise ValueError(msg) - - # Create updated message updated_message: AnyMessage = HumanMessage( content=new_content, id=last_user_msg.id, @@ -643,26 +226,11 @@ class PIIMiddleware(AgentMiddleware): continue content = str(tool_msg.content) - matches = self.detector(content) + new_content, matches = self._process_content(content) if not matches: continue - # Apply strategy - if self.strategy == "block": - raise PIIDetectionError(self.pii_type, matches) - - if self.strategy == "redact": - new_content = _apply_redact_strategy(content, matches) - elif self.strategy == "mask": - new_content = _apply_mask_strategy(content, matches) - elif self.strategy == "hash": - new_content = _apply_hash_strategy(content, matches) - else: - # Should not reach here due to type hints - msg = f"Unknown strategy: {self.strategy}" - raise ValueError(msg) - # Create updated tool message updated_message = ToolMessage( content=new_content, @@ -718,26 +286,11 @@ class PIIMiddleware(AgentMiddleware): # Detect PII in message content content = str(last_ai_msg.content) - matches = self.detector(content) + new_content, matches = self._process_content(content) if not matches: return None - # Apply strategy - if self.strategy == "block": - raise PIIDetectionError(self.pii_type, matches) - - if self.strategy == "redact": - new_content = _apply_redact_strategy(content, matches) - elif self.strategy == "mask": - new_content = _apply_mask_strategy(content, matches) - elif 
self.strategy == "hash": - new_content = _apply_hash_strategy(content, matches) - else: - # Should not reach here due to type hints - msg = f"Unknown strategy: {self.strategy}" - raise ValueError(msg) - # Create updated message updated_message = AIMessage( content=new_content, @@ -751,3 +304,14 @@ class PIIMiddleware(AgentMiddleware): new_messages[last_ai_idx] = updated_message return {"messages": new_messages} + + +__all__ = [ + "PIIDetectionError", + "PIIMiddleware", + "detect_credit_card", + "detect_email", + "detect_ip", + "detect_mac_address", + "detect_url", +] diff --git a/libs/langchain_v1/langchain/agents/middleware/prompt_caching.py b/libs/langchain_v1/langchain/agents/middleware/prompt_caching.py deleted file mode 100644 index ae640a7459d..00000000000 --- a/libs/langchain_v1/langchain/agents/middleware/prompt_caching.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Anthropic prompt caching middleware.""" - -from collections.abc import Callable -from typing import Literal -from warnings import warn - -from langchain_core.messages import AIMessage - -from langchain.agents.middleware.types import AgentMiddleware, ModelRequest - - -class AnthropicPromptCachingMiddleware(AgentMiddleware): - """Prompt Caching Middleware. - - Optimizes API usage by caching conversation prefixes for Anthropic models. - - Learn more about Anthropic prompt caching - [here](https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching). - """ - - def __init__( - self, - type: Literal["ephemeral"] = "ephemeral", - ttl: Literal["5m", "1h"] = "5m", - min_messages_to_cache: int = 0, - unsupported_model_behavior: Literal["ignore", "warn", "raise"] = "warn", - ) -> None: - """Initialize the middleware with cache control settings. - - Args: - type: The type of cache to use, only "ephemeral" is supported. - ttl: The time to live for the cache, only "5m" and "1h" are supported. - min_messages_to_cache: The minimum number of messages until the cache is used, - default is 0. - unsupported_model_behavior: The behavior to take when an unsupported model is used. - "ignore" will ignore the unsupported model and continue without caching. - "warn" will warn the user and continue without caching. - "raise" will raise an error and stop the agent. - """ - self.type = type - self.ttl = ttl - self.min_messages_to_cache = min_messages_to_cache - self.unsupported_model_behavior = unsupported_model_behavior - - def wrap_model_call( - self, - request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: - """Modify the model request to add cache control blocks.""" - try: - from langchain_anthropic import ChatAnthropic - except ImportError: - ChatAnthropic = None # noqa: N806 - - msg: str | None = None - - if ChatAnthropic is None: - msg = ( - "AnthropicPromptCachingMiddleware caching middleware only supports " - "Anthropic models. " - "Please install langchain-anthropic." 
- ) - elif not isinstance(request.model, ChatAnthropic): - msg = ( - "AnthropicPromptCachingMiddleware caching middleware only supports " - f"Anthropic models, not instances of {type(request.model)}" - ) - - if msg is not None: - if self.unsupported_model_behavior == "raise": - raise ValueError(msg) - if self.unsupported_model_behavior == "warn": - warn(msg, stacklevel=3) - else: - return handler(request) - - messages_count = ( - len(request.messages) + 1 if request.system_prompt else len(request.messages) - ) - if messages_count < self.min_messages_to_cache: - return handler(request) - - request.model_settings["cache_control"] = {"type": self.type, "ttl": self.ttl} - - return handler(request) diff --git a/libs/langchain_v1/langchain/agents/middleware/shell_tool.py b/libs/langchain_v1/langchain/agents/middleware/shell_tool.py new file mode 100644 index 00000000000..563ef2a2c39 --- /dev/null +++ b/libs/langchain_v1/langchain/agents/middleware/shell_tool.py @@ -0,0 +1,718 @@ +"""Middleware that exposes a persistent shell tool to agents.""" + +from __future__ import annotations + +import contextlib +import logging +import os +import queue +import signal +import subprocess +import tempfile +import threading +import time +import typing +import uuid +import weakref +from dataclasses import dataclass, field +from pathlib import Path +from typing import TYPE_CHECKING, Annotated, Any, Literal + +from langchain_core.messages import ToolMessage +from langchain_core.tools.base import BaseTool, ToolException +from langgraph.channels.untracked_value import UntrackedValue +from pydantic import BaseModel, model_validator +from typing_extensions import NotRequired + +from langchain.agents.middleware._execution import ( + SHELL_TEMP_PREFIX, + BaseExecutionPolicy, + CodexSandboxExecutionPolicy, + DockerExecutionPolicy, + HostExecutionPolicy, +) +from langchain.agents.middleware._redaction import ( + PIIDetectionError, + PIIMatch, + RedactionRule, + ResolvedRedactionRule, +) +from langchain.agents.middleware.types import AgentMiddleware, AgentState, PrivateStateAttr + +if TYPE_CHECKING: + from collections.abc import Mapping, Sequence + + from langgraph.runtime import Runtime + from langgraph.types import Command + + from langchain.agents.middleware.types import ToolCallRequest + +LOGGER = logging.getLogger(__name__) +_DONE_MARKER_PREFIX = "__LC_SHELL_DONE__" + +DEFAULT_TOOL_DESCRIPTION = ( + "Execute a shell command inside a persistent session. Before running a command, " + "confirm the working directory is correct (e.g., inspect with `ls` or `pwd`) and ensure " + "any parent directories exist. Prefer absolute paths and quote paths containing spaces, " + 'such as `cd "/path/with spaces"`. Chain multiple commands with `&&` or `;` instead of ' + "embedding newlines. Avoid unnecessary `cd` usage unless explicitly required so the " + "session remains stable. Outputs may be truncated when they become very large, and long " + "running commands will be terminated once their configured timeout elapses." 
+) + + +def _cleanup_resources( + session: ShellSession, tempdir: tempfile.TemporaryDirectory[str] | None, timeout: float +) -> None: + with contextlib.suppress(Exception): + session.stop(timeout) + if tempdir is not None: + with contextlib.suppress(Exception): + tempdir.cleanup() + + +@dataclass +class _SessionResources: + """Container for per-run shell resources.""" + + session: ShellSession + tempdir: tempfile.TemporaryDirectory[str] | None + policy: BaseExecutionPolicy + _finalizer: weakref.finalize = field(init=False, repr=False) + + def __post_init__(self) -> None: + self._finalizer = weakref.finalize( + self, + _cleanup_resources, + self.session, + self.tempdir, + self.policy.termination_timeout, + ) + + +class ShellToolState(AgentState): + """Agent state extension for tracking shell session resources.""" + + shell_session_resources: NotRequired[ + Annotated[_SessionResources | None, UntrackedValue, PrivateStateAttr] + ] + + +@dataclass(frozen=True) +class CommandExecutionResult: + """Structured result from command execution.""" + + output: str + exit_code: int | None + timed_out: bool + truncated_by_lines: bool + truncated_by_bytes: bool + total_lines: int + total_bytes: int + + +class ShellSession: + """Persistent shell session that supports sequential command execution.""" + + def __init__( + self, + workspace: Path, + policy: BaseExecutionPolicy, + command: tuple[str, ...], + environment: Mapping[str, str], + ) -> None: + self._workspace = workspace + self._policy = policy + self._command = command + self._environment = dict(environment) + self._process: subprocess.Popen[str] | None = None + self._stdin: Any = None + self._queue: queue.Queue[tuple[str, str | None]] = queue.Queue() + self._lock = threading.Lock() + self._stdout_thread: threading.Thread | None = None + self._stderr_thread: threading.Thread | None = None + self._terminated = False + + def start(self) -> None: + """Start the shell subprocess and reader threads.""" + if self._process and self._process.poll() is None: + return + + self._process = self._policy.spawn( + workspace=self._workspace, + env=self._environment, + command=self._command, + ) + if ( + self._process.stdin is None + or self._process.stdout is None + or self._process.stderr is None + ): + msg = "Failed to initialize shell session pipes." 
+ raise RuntimeError(msg) + + self._stdin = self._process.stdin + self._terminated = False + self._queue = queue.Queue() + + self._stdout_thread = threading.Thread( + target=self._enqueue_stream, + args=(self._process.stdout, "stdout"), + daemon=True, + ) + self._stderr_thread = threading.Thread( + target=self._enqueue_stream, + args=(self._process.stderr, "stderr"), + daemon=True, + ) + self._stdout_thread.start() + self._stderr_thread.start() + + def restart(self) -> None: + """Restart the shell process.""" + self.stop(self._policy.termination_timeout) + self.start() + + def stop(self, timeout: float) -> None: + """Stop the shell subprocess.""" + if not self._process: + return + + if self._process.poll() is None and not self._terminated: + try: + self._stdin.write("exit\n") + self._stdin.flush() + except (BrokenPipeError, OSError): + LOGGER.debug( + "Failed to write exit command; terminating shell session.", + exc_info=True, + ) + + try: + if self._process.wait(timeout=timeout) is None: + self._kill_process() + except subprocess.TimeoutExpired: + self._kill_process() + finally: + self._terminated = True + with contextlib.suppress(Exception): + self._stdin.close() + self._process = None + + def execute(self, command: str, *, timeout: float) -> CommandExecutionResult: + """Execute a command in the persistent shell.""" + if not self._process or self._process.poll() is not None: + msg = "Shell session is not running." + raise RuntimeError(msg) + + marker = f"{_DONE_MARKER_PREFIX}{uuid.uuid4().hex}" + deadline = time.monotonic() + timeout + + with self._lock: + self._drain_queue() + payload = command if command.endswith("\n") else f"{command}\n" + self._stdin.write(payload) + self._stdin.write(f"printf '{marker} %s\\n' $?\n") + self._stdin.flush() + + return self._collect_output(marker, deadline, timeout) + + def _collect_output( + self, + marker: str, + deadline: float, + timeout: float, + ) -> CommandExecutionResult: + collected: list[str] = [] + total_lines = 0 + total_bytes = 0 + truncated_by_lines = False + truncated_by_bytes = False + exit_code: int | None = None + timed_out = False + + while True: + remaining = deadline - time.monotonic() + if remaining <= 0: + timed_out = True + break + try: + source, data = self._queue.get(timeout=remaining) + except queue.Empty: + timed_out = True + break + + if data is None: + continue + + if source == "stdout" and data.startswith(marker): + _, _, status = data.partition(" ") + exit_code = self._safe_int(status.strip()) + break + + total_lines += 1 + encoded = data.encode("utf-8", "replace") + total_bytes += len(encoded) + + if total_lines > self._policy.max_output_lines: + truncated_by_lines = True + continue + + if ( + self._policy.max_output_bytes is not None + and total_bytes > self._policy.max_output_bytes + ): + truncated_by_bytes = True + continue + + if source == "stderr": + stripped = data.rstrip("\n") + collected.append(f"[stderr] {stripped}") + if data.endswith("\n"): + collected.append("\n") + else: + collected.append(data) + + if timed_out: + LOGGER.warning( + "Command timed out after %.2f seconds; restarting shell session.", + timeout, + ) + self.restart() + return CommandExecutionResult( + output="", + exit_code=None, + timed_out=True, + truncated_by_lines=truncated_by_lines, + truncated_by_bytes=truncated_by_bytes, + total_lines=total_lines, + total_bytes=total_bytes, + ) + + output = "".join(collected) + return CommandExecutionResult( + output=output, + exit_code=exit_code, + timed_out=False, + 
truncated_by_lines=truncated_by_lines, + truncated_by_bytes=truncated_by_bytes, + total_lines=total_lines, + total_bytes=total_bytes, + ) + + def _kill_process(self) -> None: + if not self._process: + return + + if hasattr(os, "killpg"): + with contextlib.suppress(ProcessLookupError): + os.killpg(os.getpgid(self._process.pid), signal.SIGKILL) + else: # pragma: no cover + with contextlib.suppress(ProcessLookupError): + self._process.kill() + + def _enqueue_stream(self, stream: Any, label: str) -> None: + for line in iter(stream.readline, ""): + self._queue.put((label, line)) + self._queue.put((label, None)) + + def _drain_queue(self) -> None: + while True: + try: + self._queue.get_nowait() + except queue.Empty: + break + + @staticmethod + def _safe_int(value: str) -> int | None: + with contextlib.suppress(ValueError): + return int(value) + return None + + +class _ShellToolInput(BaseModel): + """Input schema for the persistent shell tool.""" + + command: str | None = None + restart: bool | None = None + + @model_validator(mode="after") + def validate_payload(self) -> _ShellToolInput: + if self.command is None and not self.restart: + msg = "Shell tool requires either 'command' or 'restart'." + raise ValueError(msg) + if self.command is not None and self.restart: + msg = "Specify only one of 'command' or 'restart'." + raise ValueError(msg) + return self + + +class _PersistentShellTool(BaseTool): + """Tool wrapper that relies on middleware interception for execution.""" + + name: str = "shell" + description: str = DEFAULT_TOOL_DESCRIPTION + args_schema: type[BaseModel] = _ShellToolInput + + def __init__(self, middleware: ShellToolMiddleware, description: str | None = None) -> None: + super().__init__() + self._middleware = middleware + if description is not None: + self.description = description + + def _run(self, **_: Any) -> Any: # pragma: no cover - executed via middleware wrapper + msg = "Persistent shell tool execution should be intercepted via middleware wrappers." + raise RuntimeError(msg) + + +class ShellToolMiddleware(AgentMiddleware[ShellToolState, Any]): + """Middleware that registers a persistent shell tool for agents. + + The middleware exposes a single long-lived shell session. Use the execution policy to + match your deployment's security posture: + + * ``HostExecutionPolicy`` - full host access; best for trusted environments where the + agent already runs inside a container or VM that provides isolation. + * ``CodexSandboxExecutionPolicy`` - reuses the Codex CLI sandbox for additional + syscall/filesystem restrictions when the CLI is available. + * ``DockerExecutionPolicy`` - launches a separate Docker container for each agent run, + providing harder isolation, optional read-only root filesystems, and user remapping. + + When no policy is provided the middleware defaults to ``HostExecutionPolicy``. + """ + + state_schema = ShellToolState + + def __init__( + self, + workspace_root: str | Path | None = None, + *, + startup_commands: tuple[str, ...] | list[str] | str | None = None, + shutdown_commands: tuple[str, ...] | list[str] | str | None = None, + execution_policy: BaseExecutionPolicy | None = None, + redaction_rules: tuple[RedactionRule, ...] | list[RedactionRule] | None = None, + tool_description: str | None = None, + shell_command: Sequence[str] | str | None = None, + env: Mapping[str, Any] | None = None, + ) -> None: + """Initialize the middleware. + + Args: + workspace_root: Base directory for the shell session. 
If omitted, a temporary + directory is created when the agent starts and removed when it ends. + startup_commands: Optional commands executed sequentially after the session starts. + shutdown_commands: Optional commands executed before the session shuts down. + execution_policy: Execution policy controlling timeouts, output limits, and resource + configuration. Defaults to :class:`HostExecutionPolicy` for native execution. + redaction_rules: Optional redaction rules to sanitize command output before + returning it to the model. + tool_description: Optional override for the registered shell tool description. + shell_command: Optional shell executable (string) or argument sequence used to + launch the persistent session. Defaults to an implementation-defined bash command. + env: Optional environment variables to supply to the shell session. Values are + coerced to strings before command execution. If omitted, the session inherits the + parent process environment. + """ + super().__init__() + self._workspace_root = Path(workspace_root) if workspace_root else None + self._shell_command = self._normalize_shell_command(shell_command) + self._environment = self._normalize_env(env) + if execution_policy is not None: + self._execution_policy = execution_policy + else: + self._execution_policy = HostExecutionPolicy() + rules = redaction_rules or () + self._redaction_rules: tuple[ResolvedRedactionRule, ...] = tuple( + rule.resolve() for rule in rules + ) + self._startup_commands = self._normalize_commands(startup_commands) + self._shutdown_commands = self._normalize_commands(shutdown_commands) + + description = tool_description or DEFAULT_TOOL_DESCRIPTION + self._tool = _PersistentShellTool(self, description=description) + self.tools = [self._tool] + + @staticmethod + def _normalize_commands( + commands: tuple[str, ...] | list[str] | str | None, + ) -> tuple[str, ...]: + if commands is None: + return () + if isinstance(commands, str): + return (commands,) + return tuple(commands) + + @staticmethod + def _normalize_shell_command( + shell_command: Sequence[str] | str | None, + ) -> tuple[str, ...]: + if shell_command is None: + return ("/bin/bash",) + normalized = (shell_command,) if isinstance(shell_command, str) else tuple(shell_command) + if not normalized: + msg = "Shell command must contain at least one argument." + raise ValueError(msg) + return normalized + + @staticmethod + def _normalize_env(env: Mapping[str, Any] | None) -> dict[str, str] | None: + if env is None: + return None + normalized: dict[str, str] = {} + for key, value in env.items(): + if not isinstance(key, str): + msg = "Environment variable names must be strings." 
+ raise TypeError(msg) + normalized[key] = str(value) + return normalized + + def before_agent(self, state: ShellToolState, runtime: Runtime) -> dict[str, Any] | None: # noqa: ARG002 + """Start the shell session and run startup commands.""" + resources = self._create_resources() + return {"shell_session_resources": resources} + + async def abefore_agent(self, state: ShellToolState, runtime: Runtime) -> dict[str, Any] | None: + """Async counterpart to `before_agent`.""" + return self.before_agent(state, runtime) + + def after_agent(self, state: ShellToolState, runtime: Runtime) -> None: # noqa: ARG002 + """Run shutdown commands and release resources when an agent completes.""" + resources = self._ensure_resources(state) + try: + self._run_shutdown_commands(resources.session) + finally: + resources._finalizer() + + async def aafter_agent(self, state: ShellToolState, runtime: Runtime) -> None: + """Async counterpart to `after_agent`.""" + return self.after_agent(state, runtime) + + def _ensure_resources(self, state: ShellToolState) -> _SessionResources: + resources = state.get("shell_session_resources") + if resources is not None and not isinstance(resources, _SessionResources): + resources = None + if resources is None: + msg = ( + "Shell session resources are unavailable. Ensure `before_agent` ran successfully " + "before invoking the shell tool." + ) + raise ToolException(msg) + return resources + + def _create_resources(self) -> _SessionResources: + workspace = self._workspace_root + tempdir: tempfile.TemporaryDirectory[str] | None = None + if workspace is None: + tempdir = tempfile.TemporaryDirectory(prefix=SHELL_TEMP_PREFIX) + workspace_path = Path(tempdir.name) + else: + workspace_path = workspace + workspace_path.mkdir(parents=True, exist_ok=True) + + session = ShellSession( + workspace_path, + self._execution_policy, + self._shell_command, + self._environment or {}, + ) + try: + session.start() + LOGGER.info("Started shell session in %s", workspace_path) + self._run_startup_commands(session) + except BaseException: + LOGGER.exception("Starting shell session failed; cleaning up resources.") + session.stop(self._execution_policy.termination_timeout) + if tempdir is not None: + tempdir.cleanup() + raise + + return _SessionResources(session=session, tempdir=tempdir, policy=self._execution_policy) + + def _run_startup_commands(self, session: ShellSession) -> None: + if not self._startup_commands: + return + for command in self._startup_commands: + result = session.execute(command, timeout=self._execution_policy.startup_timeout) + if result.timed_out or (result.exit_code not in (0, None)): + msg = f"Startup command '{command}' failed with exit code {result.exit_code}" + raise RuntimeError(msg) + + def _run_shutdown_commands(self, session: ShellSession) -> None: + if not self._shutdown_commands: + return + for command in self._shutdown_commands: + try: + result = session.execute(command, timeout=self._execution_policy.command_timeout) + if result.timed_out: + LOGGER.warning("Shutdown command '%s' timed out.", command) + elif result.exit_code not in (0, None): + LOGGER.warning( + "Shutdown command '%s' exited with %s.", command, result.exit_code + ) + except (RuntimeError, ToolException, OSError) as exc: + LOGGER.warning( + "Failed to run shutdown command '%s': %s", command, exc, exc_info=True + ) + + def _apply_redactions(self, content: str) -> tuple[str, dict[str, list[PIIMatch]]]: + """Apply configured redaction rules to command output.""" + matches_by_type: dict[str, list[PIIMatch]] = {} 
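+        # Rules are applied in configuration order to the running output, so a
+        # later rule sees text already sanitized by earlier rules; matches are
+        # grouped by PII type for reporting in the tool message artifact.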
+ updated = content + for rule in self._redaction_rules: + updated, matches = rule.apply(updated) + if matches: + matches_by_type.setdefault(rule.pii_type, []).extend(matches) + return updated, matches_by_type + + def _run_shell_tool( + self, + resources: _SessionResources, + payload: dict[str, Any], + *, + tool_call_id: str | None, + ) -> Any: + session = resources.session + + if payload.get("restart"): + LOGGER.info("Restarting shell session on request.") + try: + session.restart() + self._run_startup_commands(session) + except BaseException as err: + LOGGER.exception("Restarting shell session failed; session remains unavailable.") + msg = "Failed to restart shell session." + raise ToolException(msg) from err + message = "Shell session restarted." + return self._format_tool_message(message, tool_call_id, status="success") + + command = payload.get("command") + if not command or not isinstance(command, str): + msg = "Shell tool expects a 'command' string when restart is not requested." + raise ToolException(msg) + + LOGGER.info("Executing shell command: %s", command) + result = session.execute(command, timeout=self._execution_policy.command_timeout) + + if result.timed_out: + timeout_seconds = self._execution_policy.command_timeout + message = f"Error: Command timed out after {timeout_seconds:.1f} seconds." + return self._format_tool_message( + message, + tool_call_id, + status="error", + artifact={ + "timed_out": True, + "exit_code": None, + }, + ) + + try: + sanitized_output, matches = self._apply_redactions(result.output) + except PIIDetectionError as error: + LOGGER.warning("Blocking command output due to detected %s.", error.pii_type) + message = f"Output blocked: detected {error.pii_type}." + return self._format_tool_message( + message, + tool_call_id, + status="error", + artifact={ + "timed_out": False, + "exit_code": result.exit_code, + "matches": {error.pii_type: error.matches}, + }, + ) + + sanitized_output = sanitized_output or "" + if result.truncated_by_lines: + sanitized_output = ( + f"{sanitized_output.rstrip()}\n\n" + f"... Output truncated at {self._execution_policy.max_output_lines} lines " + f"(observed {result.total_lines})." + ) + if result.truncated_by_bytes and self._execution_policy.max_output_bytes is not None: + sanitized_output = ( + f"{sanitized_output.rstrip()}\n\n" + f"... Output truncated at {self._execution_policy.max_output_bytes} bytes " + f"(observed {result.total_bytes})." 
+ ) + + if result.exit_code not in (0, None): + sanitized_output = f"{sanitized_output.rstrip()}\n\nExit code: {result.exit_code}" + final_status: Literal["success", "error"] = "error" + else: + final_status = "success" + + artifact = { + "timed_out": False, + "exit_code": result.exit_code, + "truncated_by_lines": result.truncated_by_lines, + "truncated_by_bytes": result.truncated_by_bytes, + "total_lines": result.total_lines, + "total_bytes": result.total_bytes, + "redaction_matches": matches, + } + + return self._format_tool_message( + sanitized_output, + tool_call_id, + status=final_status, + artifact=artifact, + ) + + def wrap_tool_call( + self, + request: ToolCallRequest, + handler: typing.Callable[[ToolCallRequest], ToolMessage | Command], + ) -> ToolMessage | Command: + """Intercept local shell tool calls and execute them via the managed session.""" + if isinstance(request.tool, _PersistentShellTool): + resources = self._ensure_resources(request.state) + return self._run_shell_tool( + resources, + request.tool_call["args"], + tool_call_id=request.tool_call.get("id"), + ) + return handler(request) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: typing.Callable[[ToolCallRequest], typing.Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + """Async interception mirroring the synchronous tool handler.""" + if isinstance(request.tool, _PersistentShellTool): + resources = self._ensure_resources(request.state) + return self._run_shell_tool( + resources, + request.tool_call["args"], + tool_call_id=request.tool_call.get("id"), + ) + return await handler(request) + + def _format_tool_message( + self, + content: str, + tool_call_id: str | None, + *, + status: Literal["success", "error"], + artifact: dict[str, Any] | None = None, + ) -> ToolMessage | str: + artifact = artifact or {} + if tool_call_id is None: + return content + return ToolMessage( + content=content, + tool_call_id=tool_call_id, + name=self._tool.name, + status=status, + artifact=artifact, + ) + + +__all__ = [ + "CodexSandboxExecutionPolicy", + "DockerExecutionPolicy", + "HostExecutionPolicy", + "RedactionRule", + "ShellToolMiddleware", +] diff --git a/libs/langchain_v1/langchain/agents/middleware/summarization.py b/libs/langchain_v1/langchain/agents/middleware/summarization.py index de59095be1c..6ba6221206c 100644 --- a/libs/langchain_v1/langchain/agents/middleware/summarization.py +++ b/libs/langchain_v1/langchain/agents/middleware/summarization.py @@ -60,7 +60,7 @@ _SEARCH_RANGE_FOR_TOOL_PAIRS = 5 class SummarizationMiddleware(AgentMiddleware): - """Middleware that summarizes conversation history when token limits are approached. + """Summarizes conversation history when token limits are approached. 
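Stepping back to the new `shell_tool.py` module above, a minimal wiring sketch under stated assumptions: the workspace path, startup command, and `openai:gpt-4o` model id are illustrative, while the import path and constructor arguments follow the definitions shown in the diff.

```python
from langchain.agents import create_agent
from langchain.agents.middleware.shell_tool import (
    HostExecutionPolicy,
    RedactionRule,
    ShellToolMiddleware,
)

shell = ShellToolMiddleware(
    workspace_root="/tmp/agent-workspace",  # omit to use a managed temp dir
    startup_commands=("git status",),  # run sequentially after the session starts
    execution_policy=HostExecutionPolicy(),  # the default policy; full host access
    redaction_rules=[
        # Sanitize email addresses in command output before the model sees it.
        RedactionRule(pii_type="email", strategy="redact", detector=None),
    ],
)

agent = create_agent("openai:gpt-4o", middleware=[shell])
```

The middleware registers a single persistent `shell` tool and intercepts its calls in `wrap_tool_call`, so the session set up in `before_agent` survives across tool invocations within one agent run.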
This middleware monitors message token counts and automatically summarizes older messages when a threshold is reached, preserving recent messages and maintaining diff --git a/libs/langchain_v1/langchain/agents/middleware/planning.py b/libs/langchain_v1/langchain/agents/middleware/todo.py similarity index 76% rename from libs/langchain_v1/langchain/agents/middleware/planning.py rename to libs/langchain_v1/langchain/agents/middleware/todo.py index 3278ed8f125..c2b1b75d05c 100644 --- a/libs/langchain_v1/langchain/agents/middleware/planning.py +++ b/libs/langchain_v1/langchain/agents/middleware/todo.py @@ -6,14 +6,21 @@ from __future__ import annotations from typing import TYPE_CHECKING, Annotated, Literal if TYPE_CHECKING: - from collections.abc import Callable + from collections.abc import Awaitable, Callable -from langchain_core.messages import AIMessage, ToolMessage +from langchain_core.messages import SystemMessage, ToolMessage from langchain_core.tools import tool from langgraph.types import Command from typing_extensions import NotRequired, TypedDict -from langchain.agents.middleware.types import AgentMiddleware, AgentState, ModelRequest +from langchain.agents.middleware.types import ( + AgentMiddleware, + AgentState, + ModelCallResult, + ModelRequest, + ModelResponse, + OmitFromInput, +) from langchain.tools import InjectedToolCallId @@ -30,7 +37,7 @@ class Todo(TypedDict): class PlanningState(AgentState): """State schema for the todo middleware.""" - todos: NotRequired[list[Todo]] + todos: Annotated[NotRequired[list[Todo]], OmitFromInput] """List of todo items for tracking task progress.""" @@ -120,7 +127,7 @@ def write_todos(todos: list[Todo], tool_call_id: Annotated[str, InjectedToolCall ) -class PlanningMiddleware(AgentMiddleware): +class TodoListMiddleware(AgentMiddleware): """Middleware that provides todo list management capabilities to agents. This middleware adds a `write_todos` tool that allows agents to create and manage @@ -133,10 +140,10 @@ class PlanningMiddleware(AgentMiddleware): Example: ```python - from langchain.agents.middleware.planning import PlanningMiddleware + from langchain.agents.middleware.todo import TodoListMiddleware from langchain.agents import create_agent - agent = create_agent("openai:gpt-4o", middleware=[PlanningMiddleware()]) + agent = create_agent("openai:gpt-4o", middleware=[TodoListMiddleware()]) # Agent now has access to write_todos tool and todo state tracking result = await agent.invoke({"messages": [HumanMessage("Help me refactor my codebase")]}) @@ -159,7 +166,7 @@ class PlanningMiddleware(AgentMiddleware): system_prompt: str = WRITE_TODOS_SYSTEM_PROMPT, tool_description: str = WRITE_TODOS_TOOL_DESCRIPTION, ) -> None: - """Initialize the PlanningMiddleware with optional custom prompts. + """Initialize the TodoListMiddleware with optional custom prompts. Args: system_prompt: Custom system prompt to guide the agent on using the todo tool. 
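The next hunk replaces the old string concatenation with a type-aware merge of the todo prompt into `request.system_prompt`. A condensed sketch of that merge rule as a standalone helper (the helper name is illustrative, not part of the module):

```python
from langchain_core.messages import SystemMessage


def merge_todo_prompt(
    system_prompt: str | SystemMessage | None, todo_prompt: str
) -> str | SystemMessage:
    """Mirror the branch logic used by wrap_model_call/awrap_model_call."""
    if system_prompt is None:
        return todo_prompt
    if isinstance(system_prompt, str):
        return system_prompt + "\n\n" + todo_prompt
    if isinstance(system_prompt, SystemMessage) and isinstance(system_prompt.content, str):
        return SystemMessage(content=system_prompt.content + todo_prompt)
    if isinstance(system_prompt, SystemMessage) and isinstance(system_prompt.content, list):
        return SystemMessage(content=[*system_prompt.content, todo_prompt])
    return system_prompt  # any other shape is left unchanged
```

Handling `SystemMessage` separately preserves structured content blocks instead of coercing them to a single string.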
@@ -189,12 +196,47 @@ class PlanningMiddleware(AgentMiddleware): def wrap_model_call( self, request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: """Update the system prompt to include the todo system prompt.""" - request.system_prompt = ( - request.system_prompt + "\n\n" + self.system_prompt - if request.system_prompt - else self.system_prompt - ) + if request.system_prompt is None: + request.system_prompt = self.system_prompt + elif isinstance(request.system_prompt, str): + request.system_prompt = request.system_prompt + "\n\n" + self.system_prompt + elif isinstance(request.system_prompt, SystemMessage) and isinstance( + request.system_prompt.content, str + ): + request.system_prompt = SystemMessage( + content=request.system_prompt.content + self.system_prompt + ) + elif isinstance(request.system_prompt, SystemMessage) and isinstance( + request.system_prompt.content, list + ): + request.system_prompt = SystemMessage( + content=[*request.system_prompt.content, self.system_prompt] + ) return handler(request) + + async def awrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: + """Update the system prompt to include the todo system prompt (async version).""" + if request.system_prompt is None: + request.system_prompt = self.system_prompt + elif isinstance(request.system_prompt, str): + request.system_prompt = request.system_prompt + "\n\n" + self.system_prompt + elif isinstance(request.system_prompt, SystemMessage) and isinstance( + request.system_prompt.content, str + ): + request.system_prompt = SystemMessage( + content=request.system_prompt.content + self.system_prompt + ) + elif isinstance(request.system_prompt, SystemMessage) and isinstance( + request.system_prompt.content, list + ): + request.system_prompt = SystemMessage( + content=[*request.system_prompt.content, self.system_prompt] + ) + return await handler(request) diff --git a/libs/langchain_v1/langchain/agents/middleware/tool_call_limit.py b/libs/langchain_v1/langchain/agents/middleware/tool_call_limit.py index 52c5d488bee..686ca06ab81 100644 --- a/libs/langchain_v1/langchain/agents/middleware/tool_call_limit.py +++ b/libs/langchain_v1/langchain/agents/middleware/tool_call_limit.py @@ -2,71 +2,78 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Literal +from typing import TYPE_CHECKING, Annotated, Any, Generic, Literal -from langchain_core.messages import AIMessage, AnyMessage, HumanMessage +from langchain_core.messages import AIMessage, ToolCall, ToolMessage +from langgraph.channels.untracked_value import UntrackedValue +from langgraph.typing import ContextT +from typing_extensions import NotRequired -from langchain.agents.middleware.types import AgentMiddleware, AgentState, hook_config +from langchain.agents.middleware.types import ( + AgentMiddleware, + AgentState, + PrivateStateAttr, + ResponseT, + hook_config, +) if TYPE_CHECKING: from langgraph.runtime import Runtime +ExitBehavior = Literal["continue", "error", "end"] +"""How to handle execution when tool call limits are exceeded. -def _count_tool_calls_in_messages(messages: list[AnyMessage], tool_name: str | None = None) -> int: - """Count tool calls in a list of messages. 
+- `"continue"`: Block exceeded tools with error messages, let other tools continue (default) +- `"error"`: Raise a `ToolCallLimitExceededError` exception +- `"end"`: Stop execution immediately, injecting a ToolMessage and an AI message + for the single tool call that exceeded the limit. Raises `NotImplementedError` + if there are other pending tool calls (due to parallel tool calling). +""" + + +class ToolCallLimitState(AgentState[ResponseT], Generic[ResponseT]): + """State schema for ToolCallLimitMiddleware. + + Extends AgentState with tool call tracking fields. + + The count fields are dictionaries mapping tool names to execution counts. + This allows multiple middleware instances to track different tools independently. + The special key "__all__" is used for tracking all tool calls globally. + """ + + thread_tool_call_count: NotRequired[Annotated[dict[str, int], PrivateStateAttr]] + run_tool_call_count: NotRequired[Annotated[dict[str, int], UntrackedValue, PrivateStateAttr]] + + +def _build_tool_message_content(tool_name: str | None) -> str: + """Build the error message content for ToolMessage when limit is exceeded. + + This message is sent to the model, so it should not reference thread/run concepts + that the model has no notion of. Args: - messages: List of messages to count tool calls in. - tool_name: If specified, only count calls to this specific tool. - If `None`, count all tool calls. + tool_name: Tool name being limited (if specific tool), or None for all tools. Returns: - The total number of tool calls (optionally filtered by tool_name). + A concise message instructing the model not to call the tool again. """ - count = 0 - for message in messages: - if isinstance(message, AIMessage) and message.tool_calls: - if tool_name is None: - # Count all tool calls - count += len(message.tool_calls) - else: - # Count only calls to the specified tool - count += sum(1 for tc in message.tool_calls if tc["name"] == tool_name) - return count + # Always instruct the model not to call again, regardless of which limit was hit + if tool_name: + return f"Tool call limit exceeded. Do not call '{tool_name}' again." + return "Tool call limit exceeded. Do not make additional tool calls." -def _get_run_messages(messages: list[AnyMessage]) -> list[AnyMessage]: - """Get messages from the current run (after the last HumanMessage). - - Args: - messages: Full list of messages. - - Returns: - Messages from the current run (after last HumanMessage). - """ - # Find the last HumanMessage - last_human_index = -1 - for i in range(len(messages) - 1, -1, -1): - if isinstance(messages[i], HumanMessage): - last_human_index = i - break - - # If no HumanMessage found, return all messages - if last_human_index == -1: - return messages - - # Return messages after the last HumanMessage - return messages[last_human_index + 1 :] - - -def _build_tool_limit_exceeded_message( +def _build_final_ai_message_content( thread_count: int, run_count: int, thread_limit: int | None, run_limit: int | None, tool_name: str | None, ) -> str: - """Build a message indicating which tool call limits were exceeded. + """Build the final AI message content for 'end' behavior. + + This message is displayed to the user, so it should include detailed information + about which limits were exceeded. Args: thread_count: Current thread tool call count. @@ -78,14 +85,16 @@ def _build_tool_limit_exceeded_message( Returns: A formatted message describing which limits were exceeded. 
""" - tool_desc = f"'{tool_name}' tool call" if tool_name else "Tool call" + tool_desc = f"'{tool_name}' tool" if tool_name else "Tool" exceeded_limits = [] - if thread_limit is not None and thread_count >= thread_limit: - exceeded_limits.append(f"thread limit ({thread_count}/{thread_limit})") - if run_limit is not None and run_count >= run_limit: - exceeded_limits.append(f"run limit ({run_count}/{run_limit})") - return f"{tool_desc} limits exceeded: {', '.join(exceeded_limits)}" + if thread_limit is not None and thread_count > thread_limit: + exceeded_limits.append(f"thread limit exceeded ({thread_count}/{thread_limit} calls)") + if run_limit is not None and run_count > run_limit: + exceeded_limits.append(f"run limit exceeded ({run_count}/{run_limit} calls)") + + limits_text = " and ".join(exceeded_limits) + return f"{tool_desc} call limit reached: {limits_text}." class ToolCallLimitExceededError(Exception): @@ -118,52 +127,78 @@ class ToolCallLimitExceededError(Exception): self.run_limit = run_limit self.tool_name = tool_name - msg = _build_tool_limit_exceeded_message( + msg = _build_final_ai_message_content( thread_count, run_count, thread_limit, run_limit, tool_name ) super().__init__(msg) -class ToolCallLimitMiddleware(AgentMiddleware): - """Middleware that tracks tool call counts and enforces limits. +class ToolCallLimitMiddleware( + AgentMiddleware[ToolCallLimitState[ResponseT], ContextT], + Generic[ResponseT, ContextT], +): + """Track tool call counts and enforces limits during agent execution. - This middleware monitors the number of tool calls made during agent execution - and can terminate the agent when specified limits are reached. It supports - both thread-level and run-level call counting with configurable exit behaviors. + This middleware monitors the number of tool calls made and can terminate or + restrict execution when limits are exceeded. It supports both thread-level + (persistent across runs) and run-level (per invocation) call counting. - Thread-level: The middleware counts all tool calls in the entire message history - and persists this count across multiple runs (invocations) of the agent. + Configuration: + - `exit_behavior`: How to handle when limits are exceeded + - `"continue"`: Block exceeded tools, let execution continue (default) + - `"error"`: Raise an exception + - `"end"`: Stop immediately with a ToolMessage + AI message for the single + tool call that exceeded the limit (raises `NotImplementedError` if there + are other pending tool calls (due to parallel tool calling). - Run-level: The middleware counts tool calls made after the last HumanMessage, - representing the current run (invocation) of the agent. 
- - Example: + Examples: + Continue execution with blocked tools (default): ```python from langchain.agents.middleware.tool_call_limit import ToolCallLimitMiddleware from langchain.agents import create_agent - # Limit all tool calls globally - global_limiter = ToolCallLimitMiddleware(thread_limit=20, run_limit=10, exit_behavior="end") - - # Limit a specific tool - search_limiter = ToolCallLimitMiddleware( - tool_name="search", thread_limit=5, run_limit=3, exit_behavior="end" + # Block exceeded tools but let other tools and model continue + limiter = ToolCallLimitMiddleware( + thread_limit=20, + run_limit=10, + exit_behavior="continue", # default ) - # Use both in the same agent - agent = create_agent("openai:gpt-4o", middleware=[global_limiter, search_limiter]) - - result = await agent.invoke({"messages": [HumanMessage("Help me with a task")]}) + agent = create_agent("openai:gpt-4o", middleware=[limiter]) ``` + + Stop immediately when limit exceeded: + ```python + # End execution immediately with an AI message + limiter = ToolCallLimitMiddleware(run_limit=5, exit_behavior="end") + + agent = create_agent("openai:gpt-4o", middleware=[limiter]) + ``` + + Raise exception on limit: + ```python + # Strict limit with exception handling + limiter = ToolCallLimitMiddleware(tool_name="search", thread_limit=5, exit_behavior="error") + + agent = create_agent("openai:gpt-4o", middleware=[limiter]) + + try: + result = await agent.invoke({"messages": [HumanMessage("Task")]}) + except ToolCallLimitExceededError as e: + print(f"Search limit exceeded: {e}") + ``` + """ + state_schema = ToolCallLimitState # type: ignore[assignment] + def __init__( self, *, tool_name: str | None = None, thread_limit: int | None = None, run_limit: int | None = None, - exit_behavior: Literal["end", "error"] = "end", + exit_behavior: ExitBehavior = "continue", ) -> None: """Initialize the tool call limit middleware. @@ -171,17 +206,21 @@ class ToolCallLimitMiddleware(AgentMiddleware): tool_name: Name of the specific tool to limit. If `None`, limits apply to all tools. Defaults to `None`. thread_limit: Maximum number of tool calls allowed per thread. - None means no limit. Defaults to `None`. + `None` means no limit. Defaults to `None`. run_limit: Maximum number of tool calls allowed per run. - None means no limit. Defaults to `None`. - exit_behavior: What to do when limits are exceeded. - - "end": Jump to the end of the agent execution and - inject an artificial AI message indicating that the limit was exceeded. - - "error": Raise a ToolCallLimitExceededError - Defaults to "end". + `None` means no limit. Defaults to `None`. + exit_behavior: How to handle when limits are exceeded. + - `"continue"`: Block exceeded tools with error messages, let other + tools continue. Model decides when to end. (default) + - `"error"`: Raise a `ToolCallLimitExceededError` exception + - `"end"`: Stop execution immediately with a ToolMessage + AI message + for the single tool call that exceeded the limit. Raises + `NotImplementedError` if there are multiple parallel tool + calls to other tools or multiple pending tool calls. Raises: - ValueError: If both limits are None or if exit_behavior is invalid. + ValueError: If both limits are `None`, if exit_behavior is invalid, + or if run_limit exceeds thread_limit. 
""" super().__init__() @@ -189,8 +228,16 @@ class ToolCallLimitMiddleware(AgentMiddleware): msg = "At least one limit must be specified (thread_limit or run_limit)" raise ValueError(msg) - if exit_behavior not in ("end", "error"): - msg = f"Invalid exit_behavior: {exit_behavior}. Must be 'end' or 'error'" + valid_behaviors = ("continue", "error", "end") + if exit_behavior not in valid_behaviors: + msg = f"Invalid exit_behavior: {exit_behavior!r}. Must be one of {valid_behaviors}" + raise ValueError(msg) + + if thread_limit is not None and run_limit is not None and run_limit > thread_limit: + msg = ( + f"run_limit ({run_limit}) cannot exceed thread_limit ({thread_limit}). " + "The run limit should be less than or equal to the thread limit." + ) raise ValueError(msg) self.tool_name = tool_name @@ -210,51 +257,198 @@ class ToolCallLimitMiddleware(AgentMiddleware): return f"{base_name}[{self.tool_name}]" return base_name - @hook_config(can_jump_to=["end"]) - def before_model(self, state: AgentState, runtime: Runtime) -> dict[str, Any] | None: # noqa: ARG002 - """Check tool call limits before making a model call. + def _would_exceed_limit(self, thread_count: int, run_count: int) -> bool: + """Check if incrementing the counts would exceed any configured limit. Args: - state: The current agent state containing messages. + thread_count: Current thread call count. + run_count: Current run call count. + + Returns: + True if either limit would be exceeded by one more call. + """ + return (self.thread_limit is not None and thread_count + 1 > self.thread_limit) or ( + self.run_limit is not None and run_count + 1 > self.run_limit + ) + + def _matches_tool_filter(self, tool_call: ToolCall) -> bool: + """Check if a tool call matches this middleware's tool filter. + + Args: + tool_call: The tool call to check. + + Returns: + True if this middleware should track this tool call. + """ + return self.tool_name is None or tool_call["name"] == self.tool_name + + def _separate_tool_calls( + self, tool_calls: list[ToolCall], thread_count: int, run_count: int + ) -> tuple[list[ToolCall], list[ToolCall], int, int]: + """Separate tool calls into allowed and blocked based on limits. + + Args: + tool_calls: List of tool calls to evaluate. + thread_count: Current thread call count. + run_count: Current run call count. + + Returns: + Tuple of (allowed_calls, blocked_calls, final_thread_count, final_run_count). + """ + allowed_calls: list[ToolCall] = [] + blocked_calls: list[ToolCall] = [] + temp_thread_count = thread_count + temp_run_count = run_count + + for tool_call in tool_calls: + if not self._matches_tool_filter(tool_call): + continue + + if self._would_exceed_limit(temp_thread_count, temp_run_count): + blocked_calls.append(tool_call) + else: + allowed_calls.append(tool_call) + temp_thread_count += 1 + temp_run_count += 1 + + return allowed_calls, blocked_calls, temp_thread_count, temp_run_count + + @hook_config(can_jump_to=["end"]) + def after_model( + self, + state: ToolCallLimitState[ResponseT], + runtime: Runtime[ContextT], # noqa: ARG002 + ) -> dict[str, Any] | None: + """Increment tool call counts after a model call and check limits. + + Args: + state: The current agent state. runtime: The langgraph runtime. Returns: - If limits are exceeded and exit_behavior is "end", returns - a Command to jump to the end with a limit exceeded message. Otherwise returns None. + State updates with incremented tool call counts. 
If limits are exceeded + and exit_behavior is "end", also includes a jump to end with a ToolMessage + and AI message for the single exceeded tool call. Raises: ToolCallLimitExceededError: If limits are exceeded and exit_behavior is "error". + NotImplementedError: If limits are exceeded, exit_behavior is "end", + and there are multiple tool calls. """ + # Get the last AIMessage to check for tool calls messages = state.get("messages", []) + if not messages: + return None - # Count tool calls in entire thread - thread_count = _count_tool_calls_in_messages(messages, self.tool_name) + # Find the last AIMessage + last_ai_message = None + for message in reversed(messages): + if isinstance(message, AIMessage): + last_ai_message = message + break - # Count tool calls in current run (after last HumanMessage) - run_messages = _get_run_messages(messages) - run_count = _count_tool_calls_in_messages(run_messages, self.tool_name) + if not last_ai_message or not last_ai_message.tool_calls: + return None - # Check if any limits are exceeded - thread_limit_exceeded = self.thread_limit is not None and thread_count >= self.thread_limit - run_limit_exceeded = self.run_limit is not None and run_count >= self.run_limit + # Get the count key for this middleware instance + count_key = self.tool_name if self.tool_name else "__all__" - if thread_limit_exceeded or run_limit_exceeded: - if self.exit_behavior == "error": - raise ToolCallLimitExceededError( - thread_count=thread_count, - run_count=run_count, - thread_limit=self.thread_limit, - run_limit=self.run_limit, - tool_name=self.tool_name, + # Get current counts + thread_counts = state.get("thread_tool_call_count", {}).copy() + run_counts = state.get("run_tool_call_count", {}).copy() + current_thread_count = thread_counts.get(count_key, 0) + current_run_count = run_counts.get(count_key, 0) + + # Separate tool calls into allowed and blocked + allowed_calls, blocked_calls, new_thread_count, new_run_count = self._separate_tool_calls( + last_ai_message.tool_calls, current_thread_count, current_run_count + ) + + # Update counts to include only allowed calls for thread count + # (blocked calls don't count towards thread-level tracking) + # But run count includes blocked calls since they were attempted in this run + thread_counts[count_key] = new_thread_count + run_counts[count_key] = new_run_count + len(blocked_calls) + + # If no tool calls are blocked, just update counts + if not blocked_calls: + if allowed_calls: + return { + "thread_tool_call_count": thread_counts, + "run_tool_call_count": run_counts, + } + return None + + # Get final counts for building messages + final_thread_count = thread_counts[count_key] + final_run_count = run_counts[count_key] + + # Handle different exit behaviors + if self.exit_behavior == "error": + # Use hypothetical thread count to show which limit was exceeded + hypothetical_thread_count = final_thread_count + len(blocked_calls) + raise ToolCallLimitExceededError( + thread_count=hypothetical_thread_count, + run_count=final_run_count, + thread_limit=self.thread_limit, + run_limit=self.run_limit, + tool_name=self.tool_name, + ) + + # Build tool message content (sent to model - no thread/run details) + tool_msg_content = _build_tool_message_content(self.tool_name) + + # Inject artificial error ToolMessages for blocked tool calls + artificial_messages: list[ToolMessage | AIMessage] = [ + ToolMessage( + content=tool_msg_content, + tool_call_id=tool_call["id"], + name=tool_call.get("name"), + status="error", + ) + for tool_call in 
blocked_calls + ] + + if self.exit_behavior == "end": + # Check if there are tool calls to other tools that would continue executing + other_tools = [ + tc + for tc in last_ai_message.tool_calls + if self.tool_name is not None and tc["name"] != self.tool_name + ] + + if other_tools: + tool_names = ", ".join({tc["name"] for tc in other_tools}) + msg = ( + f"Cannot end execution with other tool calls pending. " + f"Found calls to: {tool_names}. Use 'continue' or 'error' behavior instead." ) - if self.exit_behavior == "end": - # Create a message indicating the limit was exceeded - limit_message = _build_tool_limit_exceeded_message( - thread_count, run_count, self.thread_limit, self.run_limit, self.tool_name - ) - limit_ai_message = AIMessage(content=limit_message) + raise NotImplementedError(msg) - return {"jump_to": "end", "messages": [limit_ai_message]} + # Build final AI message content (displayed to user - includes thread/run details) + # Use hypothetical thread count (what it would have been if call wasn't blocked) + # to show which limit was actually exceeded + hypothetical_thread_count = final_thread_count + len(blocked_calls) + final_msg_content = _build_final_ai_message_content( + hypothetical_thread_count, + final_run_count, + self.thread_limit, + self.run_limit, + self.tool_name, + ) + artificial_messages.append(AIMessage(content=final_msg_content)) - return None + return { + "thread_tool_call_count": thread_counts, + "run_tool_call_count": run_counts, + "jump_to": "end", + "messages": artificial_messages, + } + + # For exit_behavior="continue", return error messages to block exceeded tools + return { + "thread_tool_call_count": thread_counts, + "run_tool_call_count": run_counts, + "messages": artificial_messages, + } diff --git a/libs/langchain_v1/langchain/agents/middleware/tool_emulator.py b/libs/langchain_v1/langchain/agents/middleware/tool_emulator.py new file mode 100644 index 00000000000..90018150e44 --- /dev/null +++ b/libs/langchain_v1/langchain/agents/middleware/tool_emulator.py @@ -0,0 +1,200 @@ +"""Tool emulator middleware for testing.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from langchain_core.language_models.chat_models import BaseChatModel +from langchain_core.messages import HumanMessage, ToolMessage + +from langchain.agents.middleware.types import AgentMiddleware +from langchain.chat_models.base import init_chat_model + +if TYPE_CHECKING: + from collections.abc import Awaitable, Callable + + from langgraph.types import Command + + from langchain.agents.middleware.types import ToolCallRequest + from langchain.tools import BaseTool + + +class LLMToolEmulator(AgentMiddleware): + """Emulates specified tools using an LLM instead of executing them. + + This middleware allows selective emulation of tools for testing purposes. + By default (when tools=None), all tools are emulated. You can specify which + tools to emulate by passing a list of tool names or BaseTool instances. 
+ + Examples: + Emulate all tools (default behavior): + ```python + from langchain.agents.middleware import LLMToolEmulator + + middleware = LLMToolEmulator() + + agent = create_agent( + model="openai:gpt-4o", + tools=[get_weather, get_user_location, calculator], + middleware=[middleware], + ) + ``` + + Emulate specific tools by name: + ```python + middleware = LLMToolEmulator(tools=["get_weather", "get_user_location"]) + ``` + + Use a custom model for emulation: + ```python + middleware = LLMToolEmulator( + tools=["get_weather"], model="anthropic:claude-sonnet-4-5-20250929" + ) + ``` + + Emulate specific tools by passing tool instances: + ```python + middleware = LLMToolEmulator(tools=[get_weather, get_user_location]) + ``` + """ + + def __init__( + self, + *, + tools: list[str | BaseTool] | None = None, + model: str | BaseChatModel | None = None, + ) -> None: + """Initialize the tool emulator. + + Args: + tools: List of tool names (str) or BaseTool instances to emulate. + If None (default), ALL tools will be emulated. + If empty list, no tools will be emulated. + model: Model to use for emulation. + Defaults to "anthropic:claude-sonnet-4-5-20250929". + Can be a model identifier string or BaseChatModel instance. + """ + super().__init__() + + # Extract tool names from tools + # None means emulate all tools + self.emulate_all = tools is None + self.tools_to_emulate: set[str] = set() + + if not self.emulate_all and tools is not None: + for tool in tools: + if isinstance(tool, str): + self.tools_to_emulate.add(tool) + else: + # Assume BaseTool with .name attribute + self.tools_to_emulate.add(tool.name) + + # Initialize emulator model + if model is None: + self.model = init_chat_model("anthropic:claude-sonnet-4-5-20250929", temperature=1) + elif isinstance(model, BaseChatModel): + self.model = model + else: + self.model = init_chat_model(model, temperature=1) + + def wrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], ToolMessage | Command], + ) -> ToolMessage | Command: + """Emulate tool execution using LLM if tool should be emulated. + + Args: + request: Tool call request to potentially emulate. + handler: Callback to execute the tool (can be called multiple times). + + Returns: + ToolMessage with emulated response if tool should be emulated, + otherwise calls handler for normal execution. + """ + tool_name = request.tool_call["name"] + + # Check if this tool should be emulated + should_emulate = self.emulate_all or tool_name in self.tools_to_emulate + + if not should_emulate: + # Let it execute normally by calling the handler + return handler(request) + + # Extract tool information for emulation + tool_args = request.tool_call["args"] + tool_description = request.tool.description if request.tool else "No description available" + + # Build prompt for emulator LLM + prompt = ( + f"You are emulating a tool call for testing purposes.\n\n" + f"Tool: {tool_name}\n" + f"Description: {tool_description}\n" + f"Arguments: {tool_args}\n\n" + f"Generate a realistic response that this tool would return " + f"given these arguments.\n" + f"Return ONLY the tool's output, no explanation or preamble. " + f"Introduce variation into your responses." 
+ ) + + # Get emulated response from LLM + response = self.model.invoke([HumanMessage(prompt)]) + + # Short-circuit: return emulated result without executing real tool + return ToolMessage( + content=response.content, + tool_call_id=request.tool_call["id"], + name=tool_name, + ) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + """Async version of wrap_tool_call. + + Emulate tool execution using LLM if tool should be emulated. + + Args: + request: Tool call request to potentially emulate. + handler: Async callback to execute the tool (can be called multiple times). + + Returns: + ToolMessage with emulated response if tool should be emulated, + otherwise calls handler for normal execution. + """ + tool_name = request.tool_call["name"] + + # Check if this tool should be emulated + should_emulate = self.emulate_all or tool_name in self.tools_to_emulate + + if not should_emulate: + # Let it execute normally by calling the handler + return await handler(request) + + # Extract tool information for emulation + tool_args = request.tool_call["args"] + tool_description = request.tool.description if request.tool else "No description available" + + # Build prompt for emulator LLM + prompt = ( + f"You are emulating a tool call for testing purposes.\n\n" + f"Tool: {tool_name}\n" + f"Description: {tool_description}\n" + f"Arguments: {tool_args}\n\n" + f"Generate a realistic response that this tool would return " + f"given these arguments.\n" + f"Return ONLY the tool's output, no explanation or preamble. " + f"Introduce variation into your responses." + ) + + # Get emulated response from LLM (using async invoke) + response = await self.model.ainvoke([HumanMessage(prompt)]) + + # Short-circuit: return emulated result without executing real tool + return ToolMessage( + content=response.content, + tool_call_id=request.tool_call["id"], + name=tool_name, + ) diff --git a/libs/langchain_v1/langchain/agents/middleware/tool_retry.py b/libs/langchain_v1/langchain/agents/middleware/tool_retry.py new file mode 100644 index 00000000000..361158b0c61 --- /dev/null +++ b/libs/langchain_v1/langchain/agents/middleware/tool_retry.py @@ -0,0 +1,384 @@ +"""Tool retry middleware for agents.""" + +from __future__ import annotations + +import asyncio +import random +import time +from typing import TYPE_CHECKING, Literal + +from langchain_core.messages import ToolMessage + +from langchain.agents.middleware.types import AgentMiddleware + +if TYPE_CHECKING: + from collections.abc import Awaitable, Callable + + from langgraph.types import Command + + from langchain.agents.middleware.types import ToolCallRequest + from langchain.tools import BaseTool + + +class ToolRetryMiddleware(AgentMiddleware): + """Middleware that automatically retries failed tool calls with configurable backoff. + + Supports retrying on specific exceptions and exponential backoff. 
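For reference alongside the examples that follow, the wait before retry number n is `initial_delay * backoff_factor**n`, capped at `max_delay`, with an optional jitter of plus or minus 25%. A minimal sketch of the schedule the defaults produce (pre-jitter), mirroring the `_calculate_delay` helper defined further down:

```python
initial_delay, backoff_factor, max_delay = 1.0, 2.0, 60.0  # the defaults

for retry_number in range(4):
    # Same formula as _calculate_delay, before the optional +/-25% jitter is applied.
    delay = min(initial_delay * backoff_factor**retry_number, max_delay)
    print(f"wait before retry #{retry_number + 1}: ~{delay:g}s")
# wait before retry #1: ~1s, #2: ~2s, #3: ~4s, #4: ~8s
```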
+
+    Examples:
+        Basic usage with default settings (2 retries, exponential backoff):
+        ```python
+        from langchain.agents import create_agent
+        from langchain.agents.middleware import ToolRetryMiddleware
+
+        agent = create_agent(model, tools=[search_tool], middleware=[ToolRetryMiddleware()])
+        ```
+
+        Retry specific exceptions only:
+        ```python
+        from requests.exceptions import RequestException, Timeout
+
+        retry = ToolRetryMiddleware(
+            max_retries=4,
+            retry_on=(RequestException, Timeout),
+            backoff_factor=1.5,
+        )
+        ```
+
+        Custom exception filtering:
+        ```python
+        from requests.exceptions import HTTPError
+
+
+        def should_retry(exc: Exception) -> bool:
+            # Only retry on 5xx errors (HTTPError exposes the status via its response)
+            if isinstance(exc, HTTPError):
+                return exc.response is not None and 500 <= exc.response.status_code < 600
+            return False
+
+
+        retry = ToolRetryMiddleware(
+            max_retries=3,
+            retry_on=should_retry,
+        )
+        ```
+
+        Apply to specific tools with custom error handling:
+        ```python
+        def format_error(exc: Exception) -> str:
+            return "Database temporarily unavailable. Please try again later."
+
+
+        retry = ToolRetryMiddleware(
+            max_retries=4,
+            tools=["search_database"],
+            on_failure=format_error,
+        )
+        ```
+
+        Apply to specific tools using BaseTool instances:
+        ```python
+        from langchain_core.tools import tool
+
+
+        @tool
+        def search_database(query: str) -> str:
+            '''Search the database.'''
+            return results
+
+
+        retry = ToolRetryMiddleware(
+            max_retries=4,
+            tools=[search_database],  # Pass BaseTool instance
+        )
+        ```
+
+        Constant backoff (no exponential growth):
+        ```python
+        retry = ToolRetryMiddleware(
+            max_retries=5,
+            backoff_factor=0.0,  # No exponential growth
+            initial_delay=2.0,  # Always wait 2 seconds
+        )
+        ```
+
+        Raise exception on failure:
+        ```python
+        retry = ToolRetryMiddleware(
+            max_retries=2,
+            on_failure="raise",  # Re-raise exception instead of returning message
+        )
+        ```
+    """
+
+    def __init__(
+        self,
+        *,
+        max_retries: int = 2,
+        tools: list[BaseTool | str] | None = None,
+        retry_on: tuple[type[Exception], ...] | Callable[[Exception], bool] = (Exception,),
+        on_failure: (
+            Literal["raise", "return_message"] | Callable[[Exception], str]
+        ) = "return_message",
+        backoff_factor: float = 2.0,
+        initial_delay: float = 1.0,
+        max_delay: float = 60.0,
+        jitter: bool = True,
+    ) -> None:
+        """Initialize ToolRetryMiddleware.
+
+        Args:
+            max_retries: Maximum number of retry attempts after the initial call.
+                Default is 2 retries (3 total attempts). Must be >= 0.
+            tools: Optional list of tools or tool names to apply retry logic to.
+                Can be a list of `BaseTool` instances or tool name strings.
+                If `None`, applies to all tools. Default is `None`.
+            retry_on: Either a tuple of exception types to retry on, or a callable
+                that takes an exception and returns `True` if it should be retried.
+                Default is to retry on all exceptions.
+            on_failure: Behavior when all retries are exhausted. Options:
+                - `"return_message"` (default): Return a ToolMessage with error details,
+                  allowing the LLM to handle the failure and potentially recover.
+                - `"raise"`: Re-raise the exception, stopping agent execution.
+                - Custom callable: Function that takes the exception and returns a string
+                  for the ToolMessage content, allowing custom error formatting.
+            backoff_factor: Multiplier for exponential backoff. Each retry waits
+                `initial_delay * (backoff_factor ** retry_number)` seconds.
+                Set to 0.0 for constant delay. Default is 2.0.
+            initial_delay: Initial delay in seconds before first retry. Default is 1.0.
+ max_delay: Maximum delay in seconds between retries. Caps exponential + backoff growth. Default is 60.0. + jitter: Whether to add random jitter (Β±25%) to delay to avoid thundering herd. + Default is `True`. + + Raises: + ValueError: If max_retries < 0 or delays are negative. + """ + super().__init__() + + # Validate parameters + if max_retries < 0: + msg = "max_retries must be >= 0" + raise ValueError(msg) + if initial_delay < 0: + msg = "initial_delay must be >= 0" + raise ValueError(msg) + if max_delay < 0: + msg = "max_delay must be >= 0" + raise ValueError(msg) + if backoff_factor < 0: + msg = "backoff_factor must be >= 0" + raise ValueError(msg) + + self.max_retries = max_retries + + # Extract tool names from BaseTool instances or strings + self._tool_filter: list[str] | None + if tools is not None: + self._tool_filter = [tool.name if not isinstance(tool, str) else tool for tool in tools] + else: + self._tool_filter = None + + self.tools = [] # No additional tools registered by this middleware + self.retry_on = retry_on + self.on_failure = on_failure + self.backoff_factor = backoff_factor + self.initial_delay = initial_delay + self.max_delay = max_delay + self.jitter = jitter + + def _should_retry_tool(self, tool_name: str) -> bool: + """Check if retry logic should apply to this tool. + + Args: + tool_name: Name of the tool being called. + + Returns: + `True` if retry logic should apply, `False` otherwise. + """ + if self._tool_filter is None: + return True + return tool_name in self._tool_filter + + def _should_retry_exception(self, exc: Exception) -> bool: + """Check if the exception should trigger a retry. + + Args: + exc: The exception that occurred. + + Returns: + `True` if the exception should be retried, `False` otherwise. + """ + if callable(self.retry_on): + return self.retry_on(exc) + return isinstance(exc, self.retry_on) + + def _calculate_delay(self, retry_number: int) -> float: + """Calculate delay for the given retry attempt. + + Args: + retry_number: The retry attempt number (0-indexed). + + Returns: + Delay in seconds before next retry. + """ + if self.backoff_factor == 0.0: + delay = self.initial_delay + else: + delay = self.initial_delay * (self.backoff_factor**retry_number) + + # Cap at max_delay + delay = min(delay, self.max_delay) + + if self.jitter and delay > 0: + jitter_amount = delay * 0.25 + delay = delay + random.uniform(-jitter_amount, jitter_amount) # noqa: S311 + # Ensure delay is not negative after jitter + delay = max(0, delay) + + return delay + + def _format_failure_message(self, tool_name: str, exc: Exception, attempts_made: int) -> str: + """Format the failure message when retries are exhausted. + + Args: + tool_name: Name of the tool that failed. + exc: The exception that caused the failure. + attempts_made: Number of attempts actually made. + + Returns: + Formatted error message string. + """ + exc_type = type(exc).__name__ + attempt_word = "attempt" if attempts_made == 1 else "attempts" + return f"Tool '{tool_name}' failed after {attempts_made} {attempt_word} with {exc_type}" + + def _handle_failure( + self, tool_name: str, tool_call_id: str | None, exc: Exception, attempts_made: int + ) -> ToolMessage: + """Handle failure when all retries are exhausted. + + Args: + tool_name: Name of the tool that failed. + tool_call_id: ID of the tool call (may be None). + exc: The exception that caused the failure. + attempts_made: Number of attempts actually made. + + Returns: + ToolMessage with error details. 
+ + Raises: + Exception: If on_failure is "raise", re-raises the exception. + """ + if self.on_failure == "raise": + raise exc + + if callable(self.on_failure): + content = self.on_failure(exc) + else: + content = self._format_failure_message(tool_name, exc, attempts_made) + + return ToolMessage( + content=content, + tool_call_id=tool_call_id, + name=tool_name, + status="error", + ) + + def wrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], ToolMessage | Command], + ) -> ToolMessage | Command: + """Intercept tool execution and retry on failure. + + Args: + request: Tool call request with call dict, BaseTool, state, and runtime. + handler: Callable to execute the tool (can be called multiple times). + + Returns: + ToolMessage or Command (the final result). + """ + tool_name = request.tool.name if request.tool else request.tool_call["name"] + + # Check if retry should apply to this tool + if not self._should_retry_tool(tool_name): + return handler(request) + + tool_call_id = request.tool_call["id"] + + # Initial attempt + retries + for attempt in range(self.max_retries + 1): + try: + return handler(request) + except Exception as exc: # noqa: BLE001 + attempts_made = attempt + 1 # attempt is 0-indexed + + # Check if we should retry this exception + if not self._should_retry_exception(exc): + # Exception is not retryable, handle failure immediately + return self._handle_failure(tool_name, tool_call_id, exc, attempts_made) + + # Check if we have more retries left + if attempt < self.max_retries: + # Calculate and apply backoff delay + delay = self._calculate_delay(attempt) + if delay > 0: + time.sleep(delay) + # Continue to next retry + else: + # No more retries, handle failure + return self._handle_failure(tool_name, tool_call_id, exc, attempts_made) + + # Unreachable: loop always returns via handler success or _handle_failure + msg = "Unexpected: retry loop completed without returning" + raise RuntimeError(msg) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + """Intercept and control async tool execution with retry logic. + + Args: + request: Tool call request with call dict, BaseTool, state, and runtime. + handler: Async callable to execute the tool and returns ToolMessage or Command. + + Returns: + ToolMessage or Command (the final result). 
+ """ + tool_name = request.tool.name if request.tool else request.tool_call["name"] + + # Check if retry should apply to this tool + if not self._should_retry_tool(tool_name): + return await handler(request) + + tool_call_id = request.tool_call["id"] + + # Initial attempt + retries + for attempt in range(self.max_retries + 1): + try: + return await handler(request) + except Exception as exc: # noqa: BLE001 + attempts_made = attempt + 1 # attempt is 0-indexed + + # Check if we should retry this exception + if not self._should_retry_exception(exc): + # Exception is not retryable, handle failure immediately + return self._handle_failure(tool_name, tool_call_id, exc, attempts_made) + + # Check if we have more retries left + if attempt < self.max_retries: + # Calculate and apply backoff delay + delay = self._calculate_delay(attempt) + if delay > 0: + await asyncio.sleep(delay) + # Continue to next retry + else: + # No more retries, handle failure + return self._handle_failure(tool_name, tool_call_id, exc, attempts_made) + + # Unreachable: loop always returns via handler success or _handle_failure + msg = "Unexpected: retry loop completed without returning" + raise RuntimeError(msg) diff --git a/libs/langchain_v1/langchain/agents/middleware/tool_selection.py b/libs/langchain_v1/langchain/agents/middleware/tool_selection.py index f63fb68be71..b6746738156 100644 --- a/libs/langchain_v1/langchain/agents/middleware/tool_selection.py +++ b/libs/langchain_v1/langchain/agents/middleware/tool_selection.py @@ -12,11 +12,16 @@ if TYPE_CHECKING: from langchain.tools import BaseTool from langchain_core.language_models.chat_models import BaseChatModel -from langchain_core.messages import AIMessage, HumanMessage +from langchain_core.messages import HumanMessage from pydantic import Field, TypeAdapter from typing_extensions import TypedDict -from langchain.agents.middleware.types import AgentMiddleware, ModelRequest +from langchain.agents.middleware.types import ( + AgentMiddleware, + ModelCallResult, + ModelRequest, + ModelResponse, +) from langchain.chat_models.base import init_chat_model logger = logging.getLogger(__name__) @@ -245,8 +250,8 @@ class LLMToolSelectorMiddleware(AgentMiddleware): def wrap_model_call( self, request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: """Filter tools based on LLM selection before invoking the model via handler.""" selection_request = self._prepare_selection_request(request) if selection_request is None: @@ -276,8 +281,8 @@ class LLMToolSelectorMiddleware(AgentMiddleware): async def awrap_model_call( self, request: ModelRequest, - handler: Callable[[ModelRequest], Awaitable[AIMessage]], - ) -> AIMessage: + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: """Filter tools based on LLM selection before invoking the model via handler.""" selection_request = self._prepare_selection_request(request) if selection_request is None: diff --git a/libs/langchain_v1/langchain/agents/middleware/types.py b/libs/langchain_v1/langchain/agents/middleware/types.py index d0cc40b3774..41f16e8f974 100644 --- a/libs/langchain_v1/langchain/agents/middleware/types.py +++ b/libs/langchain_v1/langchain/agents/middleware/types.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable -from dataclasses import dataclass, field +from dataclasses import dataclass, field, replace from inspect import 
iscoroutinefunction from typing import ( TYPE_CHECKING, @@ -19,16 +19,22 @@ from typing import ( if TYPE_CHECKING: from collections.abc import Awaitable - from langchain.tools.tool_node import ToolCallRequest +# Needed as top level import for Pydantic schema generation on AgentState +from typing import TypeAlias -# needed as top level import for pydantic schema generation on AgentState -from langchain_core.messages import AIMessage, AnyMessage, ToolMessage # noqa: TC002 +from langchain_core.messages import ( # noqa: TC002 + AIMessage, + AnyMessage, + BaseMessage, + SystemMessage, + ToolMessage, +) from langgraph.channels.ephemeral_value import EphemeralValue -from langgraph.channels.untracked_value import UntrackedValue from langgraph.graph.message import add_messages +from langgraph.prebuilt.tool_node import ToolCallRequest, ToolCallWrapper from langgraph.types import Command # noqa: TC002 from langgraph.typing import ContextT -from typing_extensions import NotRequired, Required, TypedDict, TypeVar +from typing_extensions import NotRequired, Required, TypedDict, TypeVar, Unpack if TYPE_CHECKING: from langchain_core.language_models.chat_models import BaseChatModel @@ -42,8 +48,12 @@ __all__ = [ "AgentState", "ContextT", "ModelRequest", + "ModelResponse", "OmitFromSchema", - "PublicAgentState", + "ResponseT", + "StateT_co", + "ToolCallRequest", + "ToolCallWrapper", "after_agent", "after_model", "before_agent", @@ -59,12 +69,24 @@ JumpTo = Literal["tools", "model", "end"] ResponseT = TypeVar("ResponseT") +class _ModelRequestOverrides(TypedDict, total=False): + """Possible overrides for ModelRequest.override() method.""" + + model: BaseChatModel + system_prompt: str | None + messages: list[AnyMessage] + tool_choice: Any | None + tools: list[BaseTool | dict] + response_format: ResponseFormat | None + model_settings: dict[str, Any] + + @dataclass class ModelRequest: """Model request information for the agent.""" model: BaseChatModel - system_prompt: str | None + system_prompt: str | SystemMessage | None messages: list[AnyMessage] # excluding system prompt tool_choice: Any | None tools: list[BaseTool | dict] @@ -73,6 +95,61 @@ class ModelRequest: runtime: Runtime[ContextT] # type: ignore[valid-type] model_settings: dict[str, Any] = field(default_factory=dict) + def override(self, **overrides: Unpack[_ModelRequestOverrides]) -> ModelRequest: + """Replace the request with a new request with the given overrides. + + Returns a new `ModelRequest` instance with the specified attributes replaced. + This follows an immutable pattern, leaving the original request unchanged. + + Args: + **overrides: Keyword arguments for attributes to override. Supported keys: + - model: BaseChatModel instance + - system_prompt: Optional system prompt string or SystemMessage object + - messages: List of messages + - tool_choice: Tool choice configuration + - tools: List of available tools + - response_format: Response format specification + - model_settings: Additional model settings + + Returns: + New ModelRequest instance with specified overrides applied. + + Examples: + ```python + # Create a new request with different model + new_request = request.override(model=different_model) + + # Override multiple attributes + new_request = request.override(system_prompt="New instructions", tool_choice="auto") + ``` + """ + return replace(self, **overrides) + + +@dataclass +class ModelResponse: + """Response from model execution including messages and optional structured output. 
+ + The result will usually contain a single AIMessage, but may include + an additional ToolMessage if the model used a tool for structured output. + """ + + result: list[BaseMessage] + """List of messages from model execution.""" + + structured_response: Any = None + """Parsed structured output if response_format was specified, None otherwise.""" + + +# Type alias for middleware return type - allows returning either full response or just AIMessage +ModelCallResult: TypeAlias = "ModelResponse | AIMessage" +"""Type alias for model call handler return value. + +Middleware can return either: +- ModelResponse: Full response with messages and optional structured output +- AIMessage: Simplified return for simple use cases +""" + @dataclass class OmitFromSchema: @@ -101,21 +178,23 @@ class AgentState(TypedDict, Generic[ResponseT]): messages: Required[Annotated[list[AnyMessage], add_messages]] jump_to: NotRequired[Annotated[JumpTo | None, EphemeralValue, PrivateStateAttr]] structured_response: NotRequired[Annotated[ResponseT, OmitFromInput]] - thread_model_call_count: NotRequired[Annotated[int, PrivateStateAttr]] - run_model_call_count: NotRequired[Annotated[int, UntrackedValue, PrivateStateAttr]] -class PublicAgentState(TypedDict, Generic[ResponseT]): - """Public state schema for the agent. +class _InputAgentState(TypedDict): # noqa: PYI049 + """Input state schema for the agent.""" - Just used for typing purposes. - """ + messages: Required[Annotated[list[AnyMessage | dict], add_messages]] + + +class _OutputAgentState(TypedDict, Generic[ResponseT]): # noqa: PYI049 + """Output state schema for the agent.""" messages: Required[Annotated[list[AnyMessage], add_messages]] structured_response: NotRequired[ResponseT] StateT = TypeVar("StateT", bound=AgentState, default=AgentState) +StateT_co = TypeVar("StateT_co", bound=AgentState, default=AgentState, covariant=True) StateT_contra = TypeVar("StateT_contra", bound=AgentState, contravariant=True) @@ -167,23 +246,23 @@ class AgentMiddleware(Generic[StateT, ContextT]): def wrap_model_call( self, request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: """Intercept and control model execution via handler callback. - The handler callback executes the model request and returns an AIMessage. + The handler callback executes the model request and returns a `ModelResponse`. Middleware can call the handler multiple times for retry logic, skip calling it to short-circuit, or modify the request/response. Multiple middleware compose with first in list as outermost layer. Args: request: Model request to execute (includes state and runtime). - handler: Callback that executes the model request and returns AIMessage. - Call this to execute the model. Can be called multiple times - for retry logic. Can skip calling it to short-circuit. + handler: Callback that executes the model request and returns + `ModelResponse`. Call this to execute the model. Can be called multiple + times for retry logic. Can skip calling it to short-circuit. Returns: - Final AIMessage to use (from handler or custom). 
+ `ModelCallResult` Examples: Retry on error: @@ -200,8 +279,12 @@ class AgentMiddleware(Generic[StateT, ContextT]): Rewrite response: ```python def wrap_model_call(self, request, handler): - result = handler(request) - return AIMessage(content=f"[{result.content}]") + response = handler(request) + ai_msg = response.result[0] + return ModelResponse( + result=[AIMessage(content=f"[{ai_msg.content}]")], + structured_response=response.structured_response, + ) ``` Error to fallback: @@ -210,7 +293,7 @@ class AgentMiddleware(Generic[StateT, ContextT]): try: return handler(request) except Exception: - return AIMessage(content="Service unavailable") + return ModelResponse(result=[AIMessage(content="Service unavailable")]) ``` Cache/short-circuit: @@ -218,26 +301,51 @@ class AgentMiddleware(Generic[StateT, ContextT]): def wrap_model_call(self, request, handler): if cached := get_cache(request): return cached # Short-circuit with cached result - result = handler(request) - save_cache(request, result) - return result + response = handler(request) + save_cache(request, response) + return response + ``` + + Simple AIMessage return (converted automatically): + ```python + def wrap_model_call(self, request, handler): + response = handler(request) + # Can return AIMessage directly for simple cases + return AIMessage(content="Simplified response") ``` """ - raise NotImplementedError + msg = ( + "Synchronous implementation of wrap_model_call is not available. " + "You are likely encountering this error because you defined only the async version " + "(awrap_model_call) and invoked your agent in a synchronous context " + "(e.g., using `stream()` or `invoke()`). " + "To resolve this, either: " + "(1) subclass AgentMiddleware and implement the synchronous wrap_model_call method, " + "(2) use the @wrap_model_call decorator on a standalone sync function, or " + "(3) invoke your agent asynchronously using `astream()` or `ainvoke()`." + ) + raise NotImplementedError(msg) async def awrap_model_call( self, request: ModelRequest, - handler: Callable[[ModelRequest], Awaitable[AIMessage]], - ) -> AIMessage: - """Async version of wrap_model_call. + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: + """Intercept and control async model execution via handler callback. + + The handler callback executes the model request and returns a `ModelResponse`. + Middleware can call the handler multiple times for retry logic, skip calling + it to short-circuit, or modify the request/response. Multiple middleware + compose with first in list as outermost layer. Args: request: Model request to execute (includes state and runtime). - handler: Async callback that executes the model request. + handler: Async callback that executes the model request and returns + `ModelResponse`. Call this to execute the model. Can be called multiple + times for retry logic. Can skip calling it to short-circuit. Returns: - Final AIMessage to use (from handler or custom). + ModelCallResult Examples: Retry on error: @@ -251,7 +359,17 @@ class AgentMiddleware(Generic[StateT, ContextT]): raise ``` """ - raise NotImplementedError + msg = ( + "Asynchronous implementation of awrap_model_call is not available. " + "You are likely encountering this error because you defined only the sync version " + "(wrap_model_call) and invoked your agent in an asynchronous context " + "(e.g., using `astream()` or `ainvoke()`). 
" + "To resolve this, either: " + "(1) subclass AgentMiddleware and implement the asynchronous awrap_model_call method, " + "(2) use the @wrap_model_call decorator on a standalone async function, or " + "(3) invoke your agent synchronously using `stream()` or `invoke()`." + ) + raise NotImplementedError(msg) def after_agent(self, state: StateT, runtime: Runtime[ContextT]) -> dict[str, Any] | None: """Logic to run after the agent execution completes.""" @@ -269,15 +387,15 @@ class AgentMiddleware(Generic[StateT, ContextT]): """Intercept tool execution for retries, monitoring, or modification. Multiple middleware compose automatically (first defined = outermost). - Exceptions propagate unless handle_tool_errors is configured on ToolNode. + Exceptions propagate unless `handle_tool_errors` is configured on `ToolNode`. Args: - request: Tool call request with call dict, BaseTool, state, and runtime. - Access state via request.state and runtime via request.runtime. + request: Tool call request with call `dict`, `BaseTool`, state, and runtime. + Access state via `request.state` and runtime via `request.runtime`. handler: Callable to execute the tool (can be called multiple times). Returns: - ToolMessage or Command (the final result). + `ToolMessage` or `Command` (the final result). The handler callable can be invoked multiple times for retry logic. Each call to handler is independent and stateless. @@ -285,12 +403,15 @@ class AgentMiddleware(Generic[StateT, ContextT]): Examples: Modify request before execution: + ```python def wrap_tool_call(self, request, handler): request.tool_call["args"]["value"] *= 2 return handler(request) + ``` Retry on error (call handler multiple times): + ```python def wrap_tool_call(self, request, handler): for attempt in range(3): try: @@ -301,9 +422,11 @@ class AgentMiddleware(Generic[StateT, ContextT]): if attempt == 2: raise return result + ``` Conditional retry based on response: + ```python def wrap_tool_call(self, request, handler): for attempt in range(3): result = handler(request) @@ -312,12 +435,84 @@ class AgentMiddleware(Generic[StateT, ContextT]): if attempt < 2: continue return result + ``` """ - raise NotImplementedError + msg = ( + "Synchronous implementation of wrap_tool_call is not available. " + "You are likely encountering this error because you defined only the async version " + "(awrap_tool_call) and invoked your agent in a synchronous context " + "(e.g., using `stream()` or `invoke()`). " + "To resolve this, either: " + "(1) subclass AgentMiddleware and implement the synchronous wrap_tool_call method, " + "(2) use the @wrap_tool_call decorator on a standalone sync function, or " + "(3) invoke your agent asynchronously using `astream()` or `ainvoke()`." + ) + raise NotImplementedError(msg) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + """Intercept and control async tool execution via handler callback. + + The handler callback executes the tool call and returns a `ToolMessage` or + `Command`. Middleware can call the handler multiple times for retry logic, skip + calling it to short-circuit, or modify the request/response. Multiple middleware + compose with first in list as outermost layer. + + Args: + request: Tool call request with call `dict`, `BaseTool`, state, and runtime. + Access state via `request.state` and runtime via `request.runtime`. 
+ handler: Async callable to execute the tool and returns `ToolMessage` or + `Command`. Call this to execute the tool. Can be called multiple times + for retry logic. Can skip calling it to short-circuit. + + Returns: + `ToolMessage` or `Command` (the final result). + + The handler callable can be invoked multiple times for retry logic. + Each call to handler is independent and stateless. + + Examples: + Async retry on error: + ```python + async def awrap_tool_call(self, request, handler): + for attempt in range(3): + try: + result = await handler(request) + if is_valid(result): + return result + except Exception: + if attempt == 2: + raise + return result + ``` + + ```python + async def awrap_tool_call(self, request, handler): + if cached := await get_cache_async(request): + return ToolMessage(content=cached, tool_call_id=request.tool_call["id"]) + result = await handler(request) + await save_cache_async(request, result) + return result + ``` + """ + msg = ( + "Asynchronous implementation of awrap_tool_call is not available. " + "You are likely encountering this error because you defined only the sync version " + "(wrap_tool_call) and invoked your agent in an asynchronous context " + "(e.g., using `astream()` or `ainvoke()`). " + "To resolve this, either: " + "(1) subclass AgentMiddleware and implement the asynchronous awrap_tool_call method, " + "(2) use the @wrap_tool_call decorator on a standalone async function, or " + "(3) invoke your agent synchronously using `stream()` or `invoke()`." + ) + raise NotImplementedError(msg) class _CallableWithStateAndRuntime(Protocol[StateT_contra, ContextT]): - """Callable with AgentState and Runtime as arguments.""" + """Callable with `AgentState` and `Runtime` as arguments.""" def __call__( self, state: StateT_contra, runtime: Runtime[ContextT] @@ -327,7 +522,7 @@ class _CallableWithStateAndRuntime(Protocol[StateT_contra, ContextT]): class _CallableReturningPromptString(Protocol[StateT_contra, ContextT]): # type: ignore[misc] - """Callable that returns a prompt string given ModelRequest (contains state and runtime).""" + """Callable that returns a prompt string given `ModelRequest` (contains state and runtime).""" def __call__(self, request: ModelRequest) -> str | Awaitable[str]: """Generate a system prompt string based on the request.""" @@ -337,14 +532,15 @@ class _CallableReturningPromptString(Protocol[StateT_contra, ContextT]): # type class _CallableReturningModelResponse(Protocol[StateT_contra, ContextT]): # type: ignore[misc] """Callable for model call interception with handler callback. - Receives handler callback to execute model and returns final AIMessage. + Receives handler callback to execute model and returns `ModelResponse` or + `AIMessage`. """ def __call__( self, request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: """Intercept model execution via handler callback.""" ... @@ -352,7 +548,8 @@ class _CallableReturningModelResponse(Protocol[StateT_contra, ContextT]): # typ class _CallableReturningToolResponse(Protocol): """Callable for tool call interception with handler callback. - Receives handler callback to execute tool and returns final ToolMessage or Command. + Receives handler callback to execute tool and returns final `ToolMessage` or + `Command`. 
""" def __call__( @@ -445,22 +642,22 @@ def before_model( Callable[[_CallableWithStateAndRuntime[StateT, ContextT]], AgentMiddleware[StateT, ContextT]] | AgentMiddleware[StateT, ContextT] ): - """Decorator used to dynamically create a middleware with the before_model hook. + """Decorator used to dynamically create a middleware with the `before_model` hook. Args: func: The function to be decorated. Must accept: `state: StateT, runtime: Runtime[ContextT]` - State and runtime context state_schema: Optional custom state schema type. If not provided, uses the default - AgentState schema. + `AgentState` schema. tools: Optional list of additional tools to register with this middleware. can_jump_to: Optional list of valid jump destinations for conditional edges. - Valid values are: "tools", "model", "end" + Valid values are: `"tools"`, `"model"`, `"end"` name: Optional name for the generated middleware class. If not provided, uses the decorated function's name. Returns: - Either an AgentMiddleware instance (if func is provided directly) or a decorator function - that can be applied to a function it is wrapping. + Either an `AgentMiddleware` instance (if func is provided directly) or a + decorator function that can be applied to a function it is wrapping. The decorated function should return: - `dict[str, Any]` - State updates to merge into the agent state @@ -587,22 +784,22 @@ def after_model( Callable[[_CallableWithStateAndRuntime[StateT, ContextT]], AgentMiddleware[StateT, ContextT]] | AgentMiddleware[StateT, ContextT] ): - """Decorator used to dynamically create a middleware with the after_model hook. + """Decorator used to dynamically create a middleware with the `after_model` hook. Args: func: The function to be decorated. Must accept: `state: StateT, runtime: Runtime[ContextT]` - State and runtime context - state_schema: Optional custom state schema type. If not provided, uses the default - AgentState schema. + state_schema: Optional custom state schema type. If not provided, uses the + default `AgentState` schema. tools: Optional list of additional tools to register with this middleware. can_jump_to: Optional list of valid jump destinations for conditional edges. - Valid values are: "tools", "model", "end" + Valid values are: `"tools"`, `"model"`, `"end"` name: Optional name for the generated middleware class. If not provided, uses the decorated function's name. Returns: - Either an AgentMiddleware instance (if func is provided) or a decorator function - that can be applied to a function. + Either an `AgentMiddleware` instance (if func is provided) or a decorator + function that can be applied to a function. The decorated function should return: - `dict[str, Any]` - State updates to merge into the agent state @@ -718,22 +915,22 @@ def before_agent( Callable[[_CallableWithStateAndRuntime[StateT, ContextT]], AgentMiddleware[StateT, ContextT]] | AgentMiddleware[StateT, ContextT] ): - """Decorator used to dynamically create a middleware with the before_agent hook. + """Decorator used to dynamically create a middleware with the `before_agent` hook. Args: func: The function to be decorated. Must accept: `state: StateT, runtime: Runtime[ContextT]` - State and runtime context - state_schema: Optional custom state schema type. If not provided, uses the default - AgentState schema. + state_schema: Optional custom state schema type. If not provided, uses the + default `AgentState` schema. tools: Optional list of additional tools to register with this middleware. 
can_jump_to: Optional list of valid jump destinations for conditional edges. - Valid values are: "tools", "model", "end" + Valid values are: `"tools"`, `"model"`, `"end"` name: Optional name for the generated middleware class. If not provided, uses the decorated function's name. Returns: - Either an AgentMiddleware instance (if func is provided directly) or a decorator function - that can be applied to a function it is wrapping. + Either an `AgentMiddleware` instance (if func is provided directly) or a + decorator function that can be applied to a function it is wrapping. The decorated function should return: - `dict[str, Any]` - State updates to merge into the agent state @@ -860,22 +1057,22 @@ def after_agent( Callable[[_CallableWithStateAndRuntime[StateT, ContextT]], AgentMiddleware[StateT, ContextT]] | AgentMiddleware[StateT, ContextT] ): - """Decorator used to dynamically create a middleware with the after_agent hook. + """Decorator used to dynamically create a middleware with the `after_agent` hook. Args: func: The function to be decorated. Must accept: `state: StateT, runtime: Runtime[ContextT]` - State and runtime context - state_schema: Optional custom state schema type. If not provided, uses the default - AgentState schema. + state_schema: Optional custom state schema type. If not provided, uses the + default `AgentState` schema. tools: Optional list of additional tools to register with this middleware. can_jump_to: Optional list of valid jump destinations for conditional edges. - Valid values are: "tools", "model", "end" + Valid values are: `"tools"`, `"model"`, `"end"` name: Optional name for the generated middleware class. If not provided, uses the decorated function's name. Returns: - Either an AgentMiddleware instance (if func is provided) or a decorator function - that can be applied to a function. + Either an `AgentMiddleware` instance (if func is provided) or a decorator + function that can be applied to a function. 
The decorated function should return: - `dict[str, Any]` - State updates to merge into the agent state @@ -1037,8 +1234,8 @@ def dynamic_prompt( async def async_wrapped( self: AgentMiddleware[StateT, ContextT], # noqa: ARG001 request: ModelRequest, - handler: Callable[[ModelRequest], Awaitable[AIMessage]], - ) -> AIMessage: + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: prompt = await func(request) # type: ignore[misc] request.system_prompt = prompt return await handler(request) @@ -1058,12 +1255,22 @@ def dynamic_prompt( def wrapped( self: AgentMiddleware[StateT, ContextT], # noqa: ARG001 request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: - prompt = cast("str", func(request)) + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: + prompt = cast("str | SystemMessage", func(request)) request.system_prompt = prompt return handler(request) + async def async_wrapped_from_sync( + self: AgentMiddleware[StateT, ContextT], # noqa: ARG001 + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: + # Delegate to sync function + prompt = cast("str | SystemMessage", func(request)) + request.system_prompt = prompt + return await handler(request) + middleware_name = cast("str", getattr(func, "__name__", "DynamicPromptMiddleware")) return type( @@ -1073,6 +1280,7 @@ def dynamic_prompt( "state_schema": AgentState, "tools": [], "wrap_model_call": wrapped, + "awrap_model_call": async_wrapped_from_sync, }, )() @@ -1113,20 +1321,21 @@ def wrap_model_call( ] | AgentMiddleware[StateT, ContextT] ): - """Create middleware with wrap_model_call hook from a function. + """Create middleware with `wrap_model_call` hook from a function. Converts a function with handler callback into middleware that can intercept model calls, implement retry logic, handle errors, and rewrite responses. Args: func: Function accepting (request, handler) that calls handler(request) - to execute the model and returns final AIMessage. Request contains state and runtime. - state_schema: Custom state schema. Defaults to AgentState. + to execute the model and returns `ModelResponse` or `AIMessage`. + Request contains state and runtime. + state_schema: Custom state schema. Defaults to `AgentState`. tools: Additional tools to register with this middleware. name: Middleware class name. Defaults to function name. Returns: - AgentMiddleware instance if func provided, otherwise a decorator. + `AgentMiddleware` instance if func provided, otherwise a decorator. 
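One pattern not covered by the examples below, shown here as a hedged sketch: pairing `@wrap_model_call` with the new `ModelRequest.override()` helper to retry on a fallback model. The `openai:gpt-4o-mini` model string is an illustrative choice, and the decorator is assumed to be imported as in the surrounding examples.

```python
from langchain.chat_models import init_chat_model

fallback_model = init_chat_model("openai:gpt-4o-mini")  # illustrative fallback


@wrap_model_call
def fall_back_on_error(request, handler):
    try:
        return handler(request)
    except Exception:
        # Re-run the same request against the fallback model using override().
        return handler(request.override(model=fallback_model))
```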
Examples: Basic retry logic: @@ -1157,12 +1366,24 @@ def wrap_model_call( return handler(request) ``` - Rewrite response content: + Rewrite response content (full ModelResponse): ```python @wrap_model_call def uppercase_responses(request, handler): - result = handler(request) - return AIMessage(content=result.content.upper()) + response = handler(request) + ai_msg = response.result[0] + return ModelResponse( + result=[AIMessage(content=ai_msg.content.upper())], + structured_response=response.structured_response, + ) + ``` + + Simple AIMessage return (converted automatically): + ```python + @wrap_model_call + def simple_response(request, handler): + # AIMessage is automatically converted to ModelResponse + return AIMessage(content="Simple response") ``` """ @@ -1176,8 +1397,8 @@ def wrap_model_call( async def async_wrapped( self: AgentMiddleware[StateT, ContextT], # noqa: ARG001 request: ModelRequest, - handler: Callable[[ModelRequest], Awaitable[AIMessage]], - ) -> AIMessage: + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: return await func(request, handler) # type: ignore[misc, arg-type] middleware_name = name or cast( @@ -1197,8 +1418,8 @@ def wrap_model_call( def wrapped( self: AgentMiddleware[StateT, ContextT], # noqa: ARG001 request: ModelRequest, - handler: Callable[[ModelRequest], AIMessage], - ) -> AIMessage: + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: return func(request, handler) middleware_name = name or cast("str", getattr(func, "__name__", "WrapModelCallMiddleware")) @@ -1248,28 +1469,22 @@ def wrap_tool_call( ] | AgentMiddleware ): - """Create middleware with wrap_tool_call hook from a function. + """Create middleware with `wrap_tool_call` hook from a function. Converts a function with handler callback into middleware that can intercept tool calls, implement retry logic, monitor execution, and modify responses. Args: func: Function accepting (request, handler) that calls - handler(request) to execute the tool and returns final ToolMessage or Command. + handler(request) to execute the tool and returns final `ToolMessage` or + `Command`. Can be sync or async. tools: Additional tools to register with this middleware. name: Middleware class name. Defaults to function name. Returns: - AgentMiddleware instance if func provided, otherwise a decorator. + `AgentMiddleware` instance if func provided, otherwise a decorator. 
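As a hedged sketch of the monitoring use case mentioned above (the timing and print statement are illustrative; `request.tool_call["name"]` is the same field used elsewhere in this module):

```python
import time


@wrap_tool_call
def time_tool_calls(request, handler):
    # Pass the call through unchanged, but log how long the tool took.
    start = time.perf_counter()
    result = handler(request)
    elapsed = time.perf_counter() - start
    print(f"{request.tool_call['name']} finished in {elapsed:.2f}s")
    return result
```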
Examples: - Basic passthrough: - ```python - @wrap_tool_call - def passthrough(request, handler): - return handler(request) - ``` - Retry logic: ```python @wrap_tool_call @@ -1283,6 +1498,18 @@ def wrap_tool_call( raise ``` + Async retry logic: + ```python + @wrap_tool_call + async def async_retry(request, handler): + for attempt in range(3): + try: + return await handler(request) + except Exception: + if attempt == 2: + raise + ``` + Modify request: ```python @wrap_tool_call @@ -1306,6 +1533,31 @@ def wrap_tool_call( def decorator( func: _CallableReturningToolResponse, ) -> AgentMiddleware: + is_async = iscoroutinefunction(func) + + if is_async: + + async def async_wrapped( + self: AgentMiddleware, # noqa: ARG001 + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + return await func(request, handler) # type: ignore[arg-type,misc] + + middleware_name = name or cast( + "str", getattr(func, "__name__", "WrapToolCallMiddleware") + ) + + return type( + middleware_name, + (AgentMiddleware,), + { + "state_schema": AgentState, + "tools": tools or [], + "awrap_tool_call": async_wrapped, + }, + )() + def wrapped( self: AgentMiddleware, # noqa: ARG001 request: ToolCallRequest, diff --git a/libs/langchain_v1/langchain/agents/structured_output.py b/libs/langchain_v1/langchain/agents/structured_output.py index d7824ecaf2a..75038675807 100644 --- a/libs/langchain_v1/langchain/agents/structured_output.py +++ b/libs/langchain_v1/langchain/agents/structured_output.py @@ -34,17 +34,21 @@ SchemaKind = Literal["pydantic", "dataclass", "typeddict", "json_schema"] class StructuredOutputError(Exception): """Base class for structured output errors.""" + ai_message: AIMessage + class MultipleStructuredOutputsError(StructuredOutputError): """Raised when model returns multiple structured output tool calls when only one is expected.""" - def __init__(self, tool_names: list[str]) -> None: - """Initialize MultipleStructuredOutputsError. + def __init__(self, tool_names: list[str], ai_message: AIMessage) -> None: + """Initialize `MultipleStructuredOutputsError`. Args: tool_names: The names of the tools called for structured output. + ai_message: The AI message that contained the invalid multiple tool calls. """ self.tool_names = tool_names + self.ai_message = ai_message super().__init__( "Model incorrectly returned multiple structured responses " @@ -55,15 +59,17 @@ class MultipleStructuredOutputsError(StructuredOutputError): class StructuredOutputValidationError(StructuredOutputError): """Raised when structured output tool call arguments fail to parse according to the schema.""" - def __init__(self, tool_name: str, source: Exception) -> None: - """Initialize StructuredOutputValidationError. + def __init__(self, tool_name: str, source: Exception, ai_message: AIMessage) -> None: + """Initialize `StructuredOutputValidationError`. Args: tool_name: The name of the tool that failed. source: The exception that occurred. + ai_message: The AI message that contained the invalid structured output. """ self.tool_name = tool_name self.source = source + self.ai_message = ai_message super().__init__(f"Failed to parse structured output for tool '{tool_name}': {source}.") @@ -73,8 +79,9 @@ def _parse_with_schema( """Parse data using for any supported schema type. 
Args: - schema: The schema type (Pydantic model, dataclass, or TypedDict) - schema_kind: One of "pydantic", "dataclass", "typeddict", or "json_schema" + schema: The schema type (Pydantic model, `dataclass`, or `TypedDict`) + schema_kind: One of `"pydantic"`, `"dataclass"`, `"typeddict"`, or + `"json_schema"` data: The data to parse Returns: @@ -99,13 +106,14 @@ class _SchemaSpec(Generic[SchemaT]): """Describes a structured output schema.""" schema: type[SchemaT] - """The schema for the response, can be a Pydantic model, dataclass, TypedDict, + """The schema for the response, can be a Pydantic model, `dataclass`, `TypedDict`, or JSON schema dict.""" name: str """Name of the schema, used for tool calling. - If not provided, the name will be the model name or "response_format" if it's a JSON schema. + If not provided, the name will be the model name or `"response_format"` if it's a + JSON schema. """ description: str @@ -186,14 +194,15 @@ class ToolStrategy(Generic[SchemaT]): handle_errors: ( bool | str | type[Exception] | tuple[type[Exception], ...] | Callable[[Exception], str] ) - """Error handling strategy for structured output via ToolStrategy. Default is True. + """Error handling strategy for structured output via `ToolStrategy`. - - True: Catch all errors with default error template - - str: Catch all errors with this custom message - - type[Exception]: Only catch this exception type with default message - - tuple[type[Exception], ...]: Only catch these exception types with default message - - Callable[[Exception], str]: Custom function that returns error message - - False: No retry, let exceptions propagate + - `True`: Catch all errors with default error template + - `str`: Catch all errors with this custom message + - `type[Exception]`: Only catch this exception type with default message + - `tuple[type[Exception], ...]`: Only catch these exception types with default + message + - `Callable[[Exception], str]`: Custom function that returns error message + - `False`: No retry, let exceptions propagate """ def __init__( @@ -207,9 +216,10 @@ class ToolStrategy(Generic[SchemaT]): | tuple[type[Exception], ...] | Callable[[Exception], str] = True, ) -> None: - """Initialize ToolStrategy. + """Initialize `ToolStrategy`. - Initialize ToolStrategy with schemas, tool message content, and error handling strategy. + Initialize `ToolStrategy` with schemas, tool message content, and error handling + strategy. """ self.schema = schema self.tool_message_content = tool_message_content @@ -285,13 +295,13 @@ class OutputToolBinding(Generic[SchemaT]): @classmethod def from_schema_spec(cls, schema_spec: _SchemaSpec[SchemaT]) -> Self: - """Create an OutputToolBinding instance from a SchemaSpec. + """Create an `OutputToolBinding` instance from a `SchemaSpec`. 
Args: - schema_spec: The SchemaSpec to convert + schema_spec: The `SchemaSpec` to convert Returns: - An OutputToolBinding instance with the appropriate tool created + An `OutputToolBinding` instance with the appropriate tool created """ return cls( schema=schema_spec.schema, @@ -329,20 +339,20 @@ class ProviderStrategyBinding(Generic[SchemaT]): schema: type[SchemaT] """The original schema provided for structured output - (Pydantic model, dataclass, TypedDict, or JSON schema dict).""" + (Pydantic model, `dataclass`, `TypedDict`, or JSON schema dict).""" schema_kind: SchemaKind """Classification of the schema type for proper response construction.""" @classmethod def from_schema_spec(cls, schema_spec: _SchemaSpec[SchemaT]) -> Self: - """Create a ProviderStrategyBinding instance from a SchemaSpec. + """Create a `ProviderStrategyBinding` instance from a `SchemaSpec`. Args: - schema_spec: The SchemaSpec to convert + schema_spec: The `SchemaSpec` to convert Returns: - A ProviderStrategyBinding instance for parsing native structured output + A `ProviderStrategyBinding` instance for parsing native structured output """ return cls( schema=schema_spec.schema, @@ -350,10 +360,10 @@ class ProviderStrategyBinding(Generic[SchemaT]): ) def parse(self, response: AIMessage) -> SchemaT: - """Parse AIMessage content according to the schema. + """Parse `AIMessage` content according to the schema. Args: - response: The AI message containing the structured output + response: The `AIMessage` containing the structured output Returns: The parsed response according to the schema diff --git a/libs/langchain_v1/langchain/chat_models/__init__.py b/libs/langchain_v1/langchain/chat_models/__init__.py index 324c590865b..f52b04c4e06 100644 --- a/libs/langchain_v1/langchain/chat_models/__init__.py +++ b/libs/langchain_v1/langchain/chat_models/__init__.py @@ -1,4 +1,10 @@ -"""Chat models.""" +"""Entrypoint to using [chat models](https://docs.langchain.com/oss/python/langchain/models) in LangChain. + +!!! warning "Reference docs" + This page contains **reference documentation** for chat models. See + [the docs](https://docs.langchain.com/oss/python/langchain/models) for conceptual + guides, tutorials, and examples on using chat models. +""" # noqa: E501 from langchain_core.language_models import BaseChatModel diff --git a/libs/langchain_v1/langchain/chat_models/base.py b/libs/langchain_v1/langchain/chat_models/base.py index 28e92b0458f..d481eda62a2 100644 --- a/libs/langchain_v1/langchain/chat_models/base.py +++ b/libs/langchain_v1/langchain/chat_models/base.py @@ -4,14 +4,7 @@ from __future__ import annotations import warnings from importlib import util -from typing import ( - TYPE_CHECKING, - Any, - Literal, - TypeAlias, - cast, - overload, -) +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast, overload from langchain_core.language_models import BaseChatModel, LanguageModelInput from langchain_core.messages import AIMessage, AnyMessage @@ -71,167 +64,199 @@ def init_chat_model( config_prefix: str | None = None, **kwargs: Any, ) -> BaseChatModel | _ConfigurableModel: - """Initialize a ChatModel from the model name and provider. + """Initialize a chat model from any supported provider using a unified interface. + + **Two main use cases:** + + 1. **Fixed model** – specify the model upfront and get a ready-to-use chat model. + 2. **Configurable model** – choose to specify parameters (including model name) at + runtime via `config`. Makes it easy to switch between models/providers without + changing your code !!! 
note - Must have the integration package corresponding to the model provider - installed. + Requires the integration package for the chosen model provider to be installed. + + See the `model_provider` parameter below for specific package names + (e.g., `pip install langchain-openai`). + + Refer to the [provider integration's API reference](https://docs.langchain.com/oss/python/integrations/providers) + for supported model parameters to use as `**kwargs`. Args: - model: The name of the model, e.g. "o3-mini", "claude-3-5-sonnet-latest". You can - also specify model and model provider in a single argument using - '{model_provider}:{model}' format, e.g. "openai:o1". - model_provider: The model provider if not specified as part of model arg (see - above). Supported model_provider values and the corresponding integration - package are: + model: The name or ID of the model, e.g. `'o3-mini'`, `'claude-sonnet-4-5-20250929'`. - - 'openai' -> langchain-openai - - 'anthropic' -> langchain-anthropic - - 'azure_openai' -> langchain-openai - - 'azure_ai' -> langchain-azure-ai - - 'google_vertexai' -> langchain-google-vertexai - - 'google_genai' -> langchain-google-genai - - 'bedrock' -> langchain-aws - - 'bedrock_converse' -> langchain-aws - - 'cohere' -> langchain-cohere - - 'fireworks' -> langchain-fireworks - - 'together' -> langchain-together - - 'mistralai' -> langchain-mistralai - - 'huggingface' -> langchain-huggingface - - 'groq' -> langchain-groq - - 'ollama' -> langchain-ollama - - 'google_anthropic_vertex' -> langchain-google-vertexai - - 'deepseek' -> langchain-deepseek - - 'ibm' -> langchain-ibm - - 'nvidia' -> langchain-nvidia-ai-endpoints - - 'xai' -> langchain-xai - - 'perplexity' -> langchain-perplexity + You can also specify model and model provider in a single argument using + `'{model_provider}:{model}'` format, e.g. `'openai:o1'`. + model_provider: The model provider if not specified as part of the model arg + (see above). - Will attempt to infer model_provider from model if not specified. 
The + Supported `model_provider` values and the corresponding integration package + are: + + - `openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `anthropic` -> [`langchain-anthropic`](https://docs.langchain.com/oss/python/integrations/providers/anthropic) + - `azure_openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `azure_ai` -> [`langchain-azure-ai`](https://docs.langchain.com/oss/python/integrations/providers/microsoft) + - `google_vertexai` -> [`langchain-google-vertexai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `google_genai` -> [`langchain-google-genai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `bedrock` -> [`langchain-aws`](https://docs.langchain.com/oss/python/integrations/providers/aws) + - `bedrock_converse` -> [`langchain-aws`](https://docs.langchain.com/oss/python/integrations/providers/aws) + - `cohere` -> [`langchain-cohere`](https://docs.langchain.com/oss/python/integrations/providers/cohere) + - `fireworks` -> [`langchain-fireworks`](https://docs.langchain.com/oss/python/integrations/providers/fireworks) + - `together` -> [`langchain-together`](https://docs.langchain.com/oss/python/integrations/providers/together) + - `mistralai` -> [`langchain-mistralai`](https://docs.langchain.com/oss/python/integrations/providers/mistralai) + - `huggingface` -> [`langchain-huggingface`](https://docs.langchain.com/oss/python/integrations/providers/huggingface) + - `groq` -> [`langchain-groq`](https://docs.langchain.com/oss/python/integrations/providers/groq) + - `ollama` -> [`langchain-ollama`](https://docs.langchain.com/oss/python/integrations/providers/ollama) + - `google_anthropic_vertex` -> [`langchain-google-vertexai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `deepseek` -> [`langchain-deepseek`](https://docs.langchain.com/oss/python/integrations/providers/deepseek) + - `ibm` -> [`langchain-ibm`](https://docs.langchain.com/oss/python/integrations/providers/ibm) + - `nvidia` -> [`langchain-nvidia-ai-endpoints`](https://docs.langchain.com/oss/python/integrations/providers/nvidia) + - `xai` -> [`langchain-xai`](https://docs.langchain.com/oss/python/integrations/providers/xai) + - `perplexity` -> [`langchain-perplexity`](https://docs.langchain.com/oss/python/integrations/providers/perplexity) + + Will attempt to infer `model_provider` from model if not specified. The following providers will be inferred based on these model prefixes: - - 'gpt-...' | 'o1...' | 'o3...' -> 'openai' - - 'claude...' -> 'anthropic' - - 'amazon....' -> 'bedrock' - - 'gemini...' -> 'google_vertexai' - - 'command...' -> 'cohere' - - 'accounts/fireworks...' -> 'fireworks' - - 'mistral...' -> 'mistralai' - - 'deepseek...' -> 'deepseek' - - 'grok...' -> 'xai' - - 'sonar...' -> 'perplexity' - configurable_fields: Which model parameters are - configurable: + - `gpt-...` | `o1...` | `o3...` -> `openai` + - `claude...` -> `anthropic` + - `amazon...` -> `bedrock` + - `gemini...` -> `google_vertexai` + - `command...` -> `cohere` + - `accounts/fireworks...` -> `fireworks` + - `mistral...` -> `mistralai` + - `deepseek...` -> `deepseek` + - `grok...` -> `xai` + - `sonar...` -> `perplexity` + configurable_fields: Which model parameters are configurable at runtime: - - None: No configurable fields. - - "any": All fields are configurable.
*See Security Note below.* - - Union[List[str], Tuple[str, ...]]: Specified fields are configurable. + - `None`: No configurable fields (i.e., a fixed model). + - `'any'`: All fields are configurable. **See security note below.** + - `list[str] | Tuple[str, ...]`: Specified fields are configurable. - Fields are assumed to have config_prefix stripped if there is a - config_prefix. If model is specified, then defaults to None. If model is - not specified, then defaults to `("model", "model_provider")`. + Fields are assumed to have `config_prefix` stripped if a `config_prefix` is + specified. - **Security Note**: Setting `configurable_fields="any"` means fields like - api_key, base_url, etc. can be altered at runtime, potentially redirecting - model requests to a different service/user. Make sure that if you're - accepting untrusted configurations that you enumerate the - `configurable_fields=(...)` explicitly. + If `model` is specified, then defaults to `None`. - config_prefix: If config_prefix is a non-empty string then model will be - configurable at runtime via the - `config["configurable"]["{config_prefix}_{param}"]` keys. If - config_prefix is an empty string then model will be configurable via + If `model` is not specified, then defaults to `("model", "model_provider")`. + + !!! warning "Security note" + Setting `configurable_fields="any"` means fields like `api_key`, + `base_url`, etc., can be altered at runtime, potentially redirecting + model requests to a different service/user. + + Make sure that if you're accepting untrusted configurations that you + enumerate the `configurable_fields=(...)` explicitly. + + config_prefix: Optional prefix for configuration keys. + + Useful when you have multiple configurable models in the same application. + + If `'config_prefix'` is a non-empty string then `model` will be configurable + at runtime via the `config["configurable"]["{config_prefix}_{param}"]` keys. + See examples below. + + If `'config_prefix'` is an empty string then model will be configurable via `config["configurable"]["{param}"]`. - kwargs: Additional model-specific keyword args to pass to - `<>.__init__(model=model_name, **kwargs)`. Examples - include: - * temperature: Model temperature. - * max_tokens: Max output tokens. - * timeout: The maximum time (in seconds) to wait for a response from the model - before canceling the request. - * max_retries: The maximum number of attempts the system will make to resend a - request if it fails due to issues like network timeouts or rate limits. - * base_url: The URL of the API endpoint where requests are sent. - * rate_limiter: A `BaseRateLimiter` to space out requests to avoid exceeding - rate limits. + **kwargs: Additional model-specific keyword args to pass to the underlying + chat model's `__init__` method. Common parameters include: + + - `temperature`: Model temperature for controlling randomness. + - `max_tokens`: Maximum number of output tokens. + - `timeout`: Maximum time (in seconds) to wait for a response. + - `max_retries`: Maximum number of retry attempts for failed requests. + - `base_url`: Custom API endpoint URL. + - `rate_limiter`: A + [`BaseRateLimiter`][langchain_core.rate_limiters.BaseRateLimiter] + instance to control request rate. + + Refer to the specific model provider's + [integration reference](https://reference.langchain.com/python/integrations/) + for all available parameters. Returns: - A BaseChatModel corresponding to the model_name and model_provider specified if - configurability is inferred to be False. 
If configurable, a chat model emulator - that initializes the underlying model at runtime once a config is passed in. + A `BaseChatModel` corresponding to the `model_name` and `model_provider` + specified if configurability is inferred to be `False`. If configurable, a + chat model emulator that initializes the underlying model at runtime once a + config is passed in. Raises: - ValueError: If model_provider cannot be inferred or isn't supported. + ValueError: If `model_provider` cannot be inferred or isn't supported. ImportError: If the model provider integration package is not installed. - ???+ note "Init non-configurable model" + ???+ example "Initialize a non-configurable model" ```python # pip install langchain langchain-openai langchain-anthropic langchain-google-vertexai + from langchain.chat_models import init_chat_model o3_mini = init_chat_model("openai:o3-mini", temperature=0) - claude_sonnet = init_chat_model("anthropic:claude-3-5-sonnet-latest", temperature=0) - gemini_2_flash = init_chat_model("google_vertexai:gemini-2.5-flash", temperature=0) + claude_sonnet = init_chat_model("anthropic:claude-sonnet-4-5-20250929", temperature=0) + gemini_2_5_flash = init_chat_model("google_vertexai:gemini-2.5-flash", temperature=0) o3_mini.invoke("what's your name") claude_sonnet.invoke("what's your name") - gemini_2_flash.invoke("what's your name") + gemini_2_5_flash.invoke("what's your name") ``` - ??? note "Partially configurable model with no default" + ??? example "Partially configurable model with no default" ```python # pip install langchain langchain-openai langchain-anthropic + from langchain.chat_models import init_chat_model - # We don't need to specify configurable=True if a model isn't specified. + # (We don't need to specify configurable=True if a model isn't specified.) configurable_model = init_chat_model(temperature=0) configurable_model.invoke("what's your name", config={"configurable": {"model": "gpt-4o"}}) - # GPT-4o response + # Use GPT-4o to generate the response configurable_model.invoke( - "what's your name", config={"configurable": {"model": "claude-3-5-sonnet-latest"}} + "what's your name", + config={"configurable": {"model": "claude-sonnet-4-5-20250929"}}, ) - # claude-3.5 sonnet response ``` - ??? note "Fully configurable model with a default" + ??? example "Fully configurable model with a default" ```python # pip install langchain langchain-openai langchain-anthropic + from langchain.chat_models import init_chat_model configurable_model_with_default = init_chat_model( "openai:gpt-4o", - configurable_fields="any", # this allows us to configure other params like temperature, max_tokens, etc at runtime. + configurable_fields="any", # This allows us to configure other params like temperature, max_tokens, etc. at runtime. config_prefix="foo", temperature=0, ) configurable_model_with_default.invoke("what's your name") - # GPT-4o response with temperature 0 + # GPT-4o response with temperature 0 (as set in default) configurable_model_with_default.invoke( "what's your name", config={ "configurable": { - "foo_model": "anthropic:claude-3-5-sonnet-latest", + "foo_model": "anthropic:claude-sonnet-4-5-20250929", "foo_temperature": 0.6, } }, ) - # Claude-3.5 sonnet response with temperature 0.6 + # Override default to use Sonnet 4.5 with temperature 0.6 to generate response ``` - ??? note "Bind tools to a configurable model" + ???
example "Bind tools to a configurable model" - You can call any ChatModel declarative methods on a configurable model in the - same way that you would with a normal model. + You can call any chat model declarative methods on a configurable model in the + same way that you would with a normal model: ```python # pip install langchain langchain-openai langchain-anthropic + from langchain.chat_models import init_chat_model from pydantic import BaseModel, Field @@ -252,39 +277,24 @@ def init_chat_model( "gpt-4o", configurable_fields=("model", "model_provider"), temperature=0 ) - configurable_model_with_tools = configurable_model.bind_tools([GetWeather, GetPopulation]) + configurable_model_with_tools = configurable_model.bind_tools( + [ + GetWeather, + GetPopulation, + ] + ) configurable_model_with_tools.invoke( "Which city is hotter today and which is bigger: LA or NY?" ) - # GPT-4o response with tool calls + # Use GPT-4o configurable_model_with_tools.invoke( "Which city is hotter today and which is bigger: LA or NY?", - config={"configurable": {"model": "claude-3-5-sonnet-latest"}}, + config={"configurable": {"model": "claude-sonnet-4-5-20250929"}}, ) - # Claude-3.5 sonnet response with tools + # Use Sonnet 4.5 ``` - !!! version-added "Added in version 0.2.7" - - !!! warning "Behavior changed in 0.2.8" - Support for `configurable_fields` and `config_prefix` added. - - !!! warning "Behavior changed in 0.2.12" - Support for Ollama via langchain-ollama package added - (langchain_ollama.ChatOllama). Previously, - the now-deprecated langchain-community version of Ollama was imported - (langchain_community.chat_models.ChatOllama). - - Support for AWS Bedrock models via the Converse API added - (model_provider="bedrock_converse"). - - !!! warning "Behavior changed in 0.3.5" - Out of beta. - - !!! warning "Behavior changed in 0.3.19" - Support for Deepseek, IBM, Nvidia, and xAI models added. 
- """ # noqa: E501 if not model and not configurable_fields: configurable_fields = ("model", "model_provider") @@ -592,7 +602,7 @@ class _ConfigurableModel(Runnable[LanguageModelInput, Any]): config: RunnableConfig | None = None, **kwargs: Any, ) -> _ConfigurableModel: - """Bind config to a Runnable, returning a new Runnable.""" + """Bind config to a `Runnable`, returning a new `Runnable`.""" config = RunnableConfig(**(config or {}), **cast("RunnableConfig", kwargs)) model_params = self._model_params(config) remaining_config = {k: v for k, v in config.items() if k != "configurable"} @@ -622,10 +632,7 @@ class _ConfigurableModel(Runnable[LanguageModelInput, Any]): @property def InputType(self) -> TypeAlias: """Get the input type for this `Runnable`.""" - from langchain_core.prompt_values import ( - ChatPromptValueConcrete, - StringPromptValue, - ) + from langchain_core.prompt_values import ChatPromptValueConcrete, StringPromptValue # This is a version of LanguageModelInput which replaces the abstract # base class BaseMessage with a union of its subclasses, which makes diff --git a/libs/langchain_v1/langchain/documents/__init__.py b/libs/langchain_v1/langchain/documents/__init__.py deleted file mode 100644 index dcc14ce68b5..00000000000 --- a/libs/langchain_v1/langchain/documents/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Document.""" - -from langchain_core.documents import Document - -__all__ = [ - "Document", -] diff --git a/libs/langchain_v1/langchain/embeddings/__init__.py b/libs/langchain_v1/langchain/embeddings/__init__.py index 84c453a9f60..6943b1cd7cc 100644 --- a/libs/langchain_v1/langchain/embeddings/__init__.py +++ b/libs/langchain_v1/langchain/embeddings/__init__.py @@ -1,12 +1,22 @@ -"""Embeddings.""" +"""Embeddings models. + +!!! warning "Reference docs" + This page contains **reference documentation** for Embeddings. See + [the docs](https://docs.langchain.com/oss/python/langchain/retrieval#embedding-models) + for conceptual guides, tutorials, and examples on using Embeddings. + +!!! warning "Modules moved" + With the release of `langchain 1.0.0`, several embeddings modules were moved to + `langchain-classic`, such as `CacheBackedEmbeddings` and all community + embeddings. See [list](https://github.com/langchain-ai/langchain/blob/bdf1cd383ce36dc18381a3bf3fb0a579337a32b5/libs/langchain/langchain/embeddings/__init__.py) + of moved modules to inform your migration. +""" from langchain_core.embeddings import Embeddings from langchain.embeddings.base import init_embeddings -from langchain.embeddings.cache import CacheBackedEmbeddings __all__ = [ - "CacheBackedEmbeddings", "Embeddings", "init_embeddings", ] diff --git a/libs/langchain_v1/langchain/embeddings/base.py b/libs/langchain_v1/langchain/embeddings/base.py index 5a5ed45712a..97b5c62a2ef 100644 --- a/libs/langchain_v1/langchain/embeddings/base.py +++ b/libs/langchain_v1/langchain/embeddings/base.py @@ -126,35 +126,55 @@ def init_embeddings( provider: str | None = None, **kwargs: Any, ) -> Embeddings: - """Initialize an embeddings model from a model name and optional provider. + """Initialize an embedding model from a model name and optional provider. !!! note - Must have the integration package corresponding to the model provider - installed. + Requires the integration package for the chosen model provider to be installed. + + See the `model_provider` parameter below for specific package names + (e.g., `pip install langchain-openai`). 
+ + Refer to the [provider integration's API reference](https://docs.langchain.com/oss/python/integrations/providers) + for supported model parameters to use as `**kwargs`. Args: - model: Name of the model to use. Can be either: - - A model string like "openai:text-embedding-3-small" - - Just the model name if provider is specified - provider: Optional explicit provider name. If not specified, - will attempt to parse from the model string. Supported providers - and their required packages: + model: The name of the model, e.g. `'openai:text-embedding-3-small'`. - {_get_provider_list()} + You can also specify model and model provider in a single argument using + `'{model_provider}:{model}'` format, e.g. `'openai:text-embedding-3-small'`. + provider: The model provider if not specified as part of the model arg + (see above). + + Supported `provider` values and the corresponding integration package + are: + + - `openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `azure_openai` -> [`langchain-openai`](https://docs.langchain.com/oss/python/integrations/providers/openai) + - `bedrock` -> [`langchain-aws`](https://docs.langchain.com/oss/python/integrations/providers/aws) + - `cohere` -> [`langchain-cohere`](https://docs.langchain.com/oss/python/integrations/providers/cohere) + - `google_vertexai` -> [`langchain-google-vertexai`](https://docs.langchain.com/oss/python/integrations/providers/google) + - `huggingface` -> [`langchain-huggingface`](https://docs.langchain.com/oss/python/integrations/providers/huggingface) + - `mistralai` -> [`langchain-mistralai`](https://docs.langchain.com/oss/python/integrations/providers/mistralai) + - `ollama` -> [`langchain-ollama`](https://docs.langchain.com/oss/python/integrations/providers/ollama) **kwargs: Additional model-specific parameters passed to the embedding model. - These vary by provider, see the provider-specific documentation for details. + + These vary by provider. Refer to the specific model provider's + [integration reference](https://reference.langchain.com/python/integrations/) + for all available parameters. Returns: - An Embeddings instance that can generate embeddings for text. + An `Embeddings` instance that can generate embeddings for text. Raises: ValueError: If the model provider is not supported or cannot be determined ImportError: If the required provider package is not installed - ???+ note "Example Usage" + ???+ example ```python + # pip install langchain langchain-openai + # Using a model string model = init_embeddings("openai:text-embedding-3-small") model.embed_query("Hello, world!") @@ -167,7 +187,7 @@ def init_embeddings( model = init_embeddings("openai:text-embedding-3-small", api_key="sk-...") ``` - !!! version-added "Added in version 0.3.9" + !!! version-added "Added in `langchain` 0.3.9" """ if not model: diff --git a/libs/langchain_v1/langchain/embeddings/cache.py b/libs/langchain_v1/langchain/embeddings/cache.py deleted file mode 100644 index 3096d340cf2..00000000000 --- a/libs/langchain_v1/langchain/embeddings/cache.py +++ /dev/null @@ -1,361 +0,0 @@ -"""Module contains code for a cache backed embedder. - -The cache backed embedder is a wrapper around an embedder that caches -embeddings in a key-value store. The cache is used to avoid recomputing -embeddings for the same text. - -The text is hashed and the hash is used as the key in the cache.
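The hashing scheme described above (hash the text, use the hash as the cache key, prefix it with a per-model namespace) can be shown with a small standard-library sketch. It mirrors the `_sha1_hash_to_uuid` and default key-encoder logic in the removed module and is for illustration only:

```python
import hashlib
import uuid

NAMESPACE_UUID = uuid.UUID(int=1985)


def text_to_cache_key(namespace: str, text: str) -> str:
    """Derive a deterministic cache key: namespace prefix + UUID of the SHA-1 digest."""
    sha1_hex = hashlib.sha1(text.encode("utf-8"), usedforsecurity=False).hexdigest()
    return f"{namespace}{uuid.uuid5(NAMESPACE_UUID, sha1_hex)}"


# Identical text and namespace always yield the same key, so a repeated
# embed_documents call for "hello" becomes a cache hit instead of a recomputation.
assert text_to_cache_key("text-embedding-3-small", "hello") == text_to_cache_key(
    "text-embedding-3-small", "hello"
)
```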
-""" - -from __future__ import annotations - -import hashlib -import json -import uuid -import warnings -from typing import TYPE_CHECKING, Literal, cast - -from langchain_core.embeddings import Embeddings -from langchain_core.utils.iter import batch_iterate - -from langchain.storage.encoder_backed import EncoderBackedStore - -if TYPE_CHECKING: - from collections.abc import Callable, Sequence - - from langchain_core.stores import BaseStore, ByteStore - -NAMESPACE_UUID = uuid.UUID(int=1985) - - -def _sha1_hash_to_uuid(text: str) -> uuid.UUID: - """Return a UUID derived from *text* using SHA-1 (deterministic). - - Deterministic and fast, **but not collision-resistant**. - - A malicious attacker could try to create two different texts that hash to the same - UUID. This may not necessarily be an issue in the context of caching embeddings, - but new applications should swap this out for a stronger hash function like - xxHash, BLAKE2 or SHA-256, which are collision-resistant. - """ - sha1_hex = hashlib.sha1(text.encode("utf-8"), usedforsecurity=False).hexdigest() - # Embed the hex string in `uuid5` to obtain a valid UUID. - return uuid.uuid5(NAMESPACE_UUID, sha1_hex) - - -def _make_default_key_encoder(namespace: str, algorithm: str) -> Callable[[str], str]: - """Create a default key encoder function. - - Args: - namespace: Prefix that segregates keys from different embedding models. - algorithm: - * `'sha1'` - fast but not collision-resistant - * `'blake2b'` - cryptographically strong, faster than SHA-1 - * `'sha256'` - cryptographically strong, slower than SHA-1 - * `'sha512'` - cryptographically strong, slower than SHA-1 - - Returns: - A function that encodes a key using the specified algorithm. - """ - if algorithm == "sha1": - _warn_about_sha1_encoder() - - def _key_encoder(key: str) -> str: - """Encode a key using the specified algorithm.""" - if algorithm == "sha1": - return f"{namespace}{_sha1_hash_to_uuid(key)}" - if algorithm == "blake2b": - return f"{namespace}{hashlib.blake2b(key.encode('utf-8')).hexdigest()}" - if algorithm == "sha256": - return f"{namespace}{hashlib.sha256(key.encode('utf-8')).hexdigest()}" - if algorithm == "sha512": - return f"{namespace}{hashlib.sha512(key.encode('utf-8')).hexdigest()}" - msg = f"Unsupported algorithm: {algorithm}" - raise ValueError(msg) - - return _key_encoder - - -def _value_serializer(value: Sequence[float]) -> bytes: - """Serialize a value.""" - return json.dumps(value).encode() - - -def _value_deserializer(serialized_value: bytes) -> list[float]: - """Deserialize a value.""" - return cast("list[float]", json.loads(serialized_value.decode())) - - -# The warning is global; track emission, so it appears only once. -_warned_about_sha1: bool = False - - -def _warn_about_sha1_encoder() -> None: - """Emit a one-time warning about SHA-1 collision weaknesses.""" - global _warned_about_sha1 # noqa: PLW0603 - if not _warned_about_sha1: - warnings.warn( - "Using default key encoder: SHA-1 is *not* collision-resistant. " - "While acceptable for most cache scenarios, a motivated attacker " - "can craft two different payloads that map to the same cache key. " - "If that risk matters in your environment, supply a stronger " - "encoder (e.g. SHA-256 or BLAKE2) via the `key_encoder` argument. 
" - "If you change the key encoder, consider also creating a new cache, " - "to avoid (the potential for) collisions with existing keys.", - category=UserWarning, - stacklevel=2, - ) - _warned_about_sha1 = True - - -class CacheBackedEmbeddings(Embeddings): - """Interface for caching results from embedding models. - - The interface allows works with any store that implements - the abstract store interface accepting keys of type str and values of list of - floats. - - If need be, the interface can be extended to accept other implementations - of the value serializer and deserializer, as well as the key encoder. - - Note that by default only document embeddings are cached. To cache query - embeddings too, pass in a query_embedding_store to constructor. - - Examples: - ```python - from langchain.embeddings import CacheBackedEmbeddings - from langchain.storage import LocalFileStore - from langchain_community.embeddings import OpenAIEmbeddings - - store = LocalFileStore("./my_cache") - - underlying_embedder = OpenAIEmbeddings() - embedder = CacheBackedEmbeddings.from_bytes_store( - underlying_embedder, store, namespace=underlying_embedder.model - ) - - # Embedding is computed and cached - embeddings = embedder.embed_documents(["hello", "goodbye"]) - - # Embeddings are retrieved from the cache, no computation is done - embeddings = embedder.embed_documents(["hello", "goodbye"]) - ``` - """ - - def __init__( - self, - underlying_embeddings: Embeddings, - document_embedding_store: BaseStore[str, list[float]], - *, - batch_size: int | None = None, - query_embedding_store: BaseStore[str, list[float]] | None = None, - ) -> None: - """Initialize the embedder. - - Args: - underlying_embeddings: the embedder to use for computing embeddings. - document_embedding_store: The store to use for caching document embeddings. - batch_size: The number of documents to embed between store updates. - query_embedding_store: The store to use for caching query embeddings. - If `None`, query embeddings are not cached. - """ - super().__init__() - self.document_embedding_store = document_embedding_store - self.query_embedding_store = query_embedding_store - self.underlying_embeddings = underlying_embeddings - self.batch_size = batch_size - - def embed_documents(self, texts: list[str]) -> list[list[float]]: - """Embed a list of texts. - - The method first checks the cache for the embeddings. - If the embeddings are not found, the method uses the underlying embedder - to embed the documents and stores the results in the cache. - - Args: - texts: A list of texts to embed. - - Returns: - A list of embeddings for the given texts. - """ - vectors: list[list[float] | None] = self.document_embedding_store.mget( - texts, - ) - all_missing_indices: list[int] = [i for i, vector in enumerate(vectors) if vector is None] - - for missing_indices in batch_iterate(self.batch_size, all_missing_indices): - missing_texts = [texts[i] for i in missing_indices] - missing_vectors = self.underlying_embeddings.embed_documents(missing_texts) - self.document_embedding_store.mset( - list(zip(missing_texts, missing_vectors, strict=False)), - ) - for index, updated_vector in zip(missing_indices, missing_vectors, strict=False): - vectors[index] = updated_vector - - return cast( - "list[list[float]]", - vectors, - ) # Nones should have been resolved by now - - async def aembed_documents(self, texts: list[str]) -> list[list[float]]: - """Embed a list of texts. - - The method first checks the cache for the embeddings. 
- If the embeddings are not found, the method uses the underlying embedder - to embed the documents and stores the results in the cache. - - Args: - texts: A list of texts to embed. - - Returns: - A list of embeddings for the given texts. - """ - vectors: list[list[float] | None] = await self.document_embedding_store.amget(texts) - all_missing_indices: list[int] = [i for i, vector in enumerate(vectors) if vector is None] - - # batch_iterate supports None batch_size which returns all elements at once - # as a single batch. - for missing_indices in batch_iterate(self.batch_size, all_missing_indices): - missing_texts = [texts[i] for i in missing_indices] - missing_vectors = await self.underlying_embeddings.aembed_documents( - missing_texts, - ) - await self.document_embedding_store.amset( - list(zip(missing_texts, missing_vectors, strict=False)), - ) - for index, updated_vector in zip(missing_indices, missing_vectors, strict=False): - vectors[index] = updated_vector - - return cast( - "list[list[float]]", - vectors, - ) # Nones should have been resolved by now - - def embed_query(self, text: str) -> list[float]: - """Embed query text. - - By default, this method does not cache queries. To enable caching, set the - `cache_query` parameter to `True` when initializing the embedder. - - Args: - text: The text to embed. - - Returns: - The embedding for the given text. - """ - if not self.query_embedding_store: - return self.underlying_embeddings.embed_query(text) - - (cached,) = self.query_embedding_store.mget([text]) - if cached is not None: - return cached - - vector = self.underlying_embeddings.embed_query(text) - self.query_embedding_store.mset([(text, vector)]) - return vector - - async def aembed_query(self, text: str) -> list[float]: - """Embed query text. - - By default, this method does not cache queries. To enable caching, set the - `cache_query` parameter to `True` when initializing the embedder. - - Args: - text: The text to embed. - - Returns: - The embedding for the given text. - """ - if not self.query_embedding_store: - return await self.underlying_embeddings.aembed_query(text) - - (cached,) = await self.query_embedding_store.amget([text]) - if cached is not None: - return cached - - vector = await self.underlying_embeddings.aembed_query(text) - await self.query_embedding_store.amset([(text, vector)]) - return vector - - @classmethod - def from_bytes_store( - cls, - underlying_embeddings: Embeddings, - document_embedding_cache: ByteStore, - *, - namespace: str = "", - batch_size: int | None = None, - query_embedding_cache: bool | ByteStore = False, - key_encoder: Callable[[str], str] | Literal["sha1", "blake2b", "sha256", "sha512"] = "sha1", - ) -> CacheBackedEmbeddings: - """On-ramp that adds the necessary serialization and encoding to the store. - - Args: - underlying_embeddings: The embedder to use for embedding. - document_embedding_cache: The cache to use for storing document embeddings. - namespace: The namespace to use for document cache. - This namespace is used to avoid collisions with other caches. - For example, set it to the name of the embedding model used. - batch_size: The number of documents to embed between store updates. - query_embedding_cache: The cache to use for storing query embeddings. - True to use the same cache as document embeddings. - False to not cache query embeddings. - key_encoder: Optional callable to encode keys. If not provided, - a default encoder using SHA-1 will be used. 
SHA-1 is not - collision-resistant, and a motivated attacker could craft two - different texts that hash to the same cache key. - - New applications should use one of the alternative encoders - or provide a custom and strong key encoder function to avoid this risk. - - If you change a key encoder in an existing cache, consider - just creating a new cache, to avoid (the potential for) - collisions with existing keys or having duplicate keys - for the same text in the cache. - - Returns: - An instance of CacheBackedEmbeddings that uses the provided cache. - """ - if isinstance(key_encoder, str): - key_encoder = _make_default_key_encoder(namespace, key_encoder) - elif callable(key_encoder): - # If a custom key encoder is provided, it should not be used with a - # namespace. - # A user can handle namespacing in directly their custom key encoder. - if namespace: - msg = ( - "Do not supply `namespace` when using a custom key_encoder; " - "add any prefixing inside the encoder itself." - ) - raise ValueError(msg) - else: - msg = ( - "key_encoder must be either 'blake2b', 'sha1', 'sha256', 'sha512' " - "or a callable that encodes keys." - ) - raise ValueError(msg) # noqa: TRY004 - - document_embedding_store = EncoderBackedStore[str, list[float]]( - document_embedding_cache, - key_encoder, - _value_serializer, - _value_deserializer, - ) - if query_embedding_cache is True: - query_embedding_store = document_embedding_store - elif query_embedding_cache is False: - query_embedding_store = None - else: - query_embedding_store = EncoderBackedStore[str, list[float]]( - query_embedding_cache, - key_encoder, - _value_serializer, - _value_deserializer, - ) - - return cls( - underlying_embeddings, - document_embedding_store, - batch_size=batch_size, - query_embedding_store=query_embedding_store, - ) diff --git a/libs/langchain_v1/langchain/messages/__init__.py b/libs/langchain_v1/langchain/messages/__init__.py index 1aecc4a3307..b757afd1f47 100644 --- a/libs/langchain_v1/langchain/messages/__init__.py +++ b/libs/langchain_v1/langchain/messages/__init__.py @@ -1,29 +1,78 @@ -"""Message types.""" +"""Message and message content types. + +Includes message types for different roles (e.g., human, AI, system), as well as types +for message content blocks (e.g., text, image, audio) and tool calls. + +!!! warning "Reference docs" + This page contains **reference documentation** for Messages. See + [the docs](https://docs.langchain.com/oss/python/langchain/messages) for conceptual + guides, tutorials, and examples on using Messages. 
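A quick, runnable sketch of the expanded `langchain.messages` surface shown in the import list that follows (assumes the `langchain` package is installed; these names are re-exports from `langchain-core`, and the `token_counter=len` choice simply counts each message as one token for illustration):

```python
from langchain.messages import AIMessage, HumanMessage, SystemMessage, trim_messages

history = [
    SystemMessage("You are terse."),
    HumanMessage("hi"),
    AIMessage("hello"),
    HumanMessage("what did I just say?"),
]

# Keep only the most recent messages; with token_counter=len each message
# counts as one "token", so max_tokens=2 keeps the last two messages.
recent = trim_messages(history, max_tokens=2, token_counter=len, strategy="last")
print([type(m).__name__ for m in recent])  # ['AIMessage', 'HumanMessage']
```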
+""" from langchain_core.messages import ( AIMessage, AIMessageChunk, + Annotation, AnyMessage, + AudioContentBlock, + Citation, + ContentBlock, + DataContentBlock, + FileContentBlock, HumanMessage, + ImageContentBlock, + InputTokenDetails, InvalidToolCall, MessageLikeRepresentation, + NonStandardAnnotation, + NonStandardContentBlock, + OutputTokenDetails, + PlainTextContentBlock, + ReasoningContentBlock, + RemoveMessage, + ServerToolCall, + ServerToolCallChunk, + ServerToolResult, SystemMessage, + TextContentBlock, ToolCall, ToolCallChunk, ToolMessage, + UsageMetadata, + VideoContentBlock, trim_messages, ) __all__ = [ "AIMessage", "AIMessageChunk", + "Annotation", "AnyMessage", + "AudioContentBlock", + "Citation", + "ContentBlock", + "DataContentBlock", + "FileContentBlock", "HumanMessage", + "ImageContentBlock", + "InputTokenDetails", "InvalidToolCall", "MessageLikeRepresentation", + "NonStandardAnnotation", + "NonStandardContentBlock", + "OutputTokenDetails", + "PlainTextContentBlock", + "ReasoningContentBlock", + "RemoveMessage", + "ServerToolCall", + "ServerToolCallChunk", + "ServerToolResult", "SystemMessage", + "TextContentBlock", "ToolCall", "ToolCallChunk", "ToolMessage", + "UsageMetadata", + "VideoContentBlock", "trim_messages", ] diff --git a/libs/langchain_v1/langchain/storage/__init__.py b/libs/langchain_v1/langchain/storage/__init__.py deleted file mode 100644 index 74680e1db9b..00000000000 --- a/libs/langchain_v1/langchain/storage/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Implementations of key-value stores and storage helpers. - -Module provides implementations of various key-value stores that conform -to a simple key-value interface. - -The primary goal of these storages is to support implementation of caching. -""" - -from langchain_core.stores import ( - InMemoryByteStore, - InMemoryStore, - InvalidKeyException, -) - -from langchain.storage.encoder_backed import EncoderBackedStore - -__all__ = [ - "EncoderBackedStore", - "InMemoryByteStore", - "InMemoryStore", - "InvalidKeyException", -] diff --git a/libs/langchain_v1/langchain/storage/encoder_backed.py b/libs/langchain_v1/langchain/storage/encoder_backed.py deleted file mode 100644 index 80e60c32a53..00000000000 --- a/libs/langchain_v1/langchain/storage/encoder_backed.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Encoder-backed store implementation.""" - -from collections.abc import AsyncIterator, Callable, Iterator, Sequence -from typing import ( - Any, - TypeVar, -) - -from langchain_core.stores import BaseStore - -K = TypeVar("K") -V = TypeVar("V") - - -class EncoderBackedStore(BaseStore[K, V]): - """Wraps a store with key and value encoders/decoders. 
- - Examples that uses JSON for encoding/decoding: - - ```python - import json - - - def key_encoder(key: int) -> str: - return json.dumps(key) - - - def value_serializer(value: float) -> str: - return json.dumps(value) - - - def value_deserializer(serialized_value: str) -> float: - return json.loads(serialized_value) - - - # Create an instance of the abstract store - abstract_store = MyCustomStore() - - # Create an instance of the encoder-backed store - store = EncoderBackedStore( - store=abstract_store, - key_encoder=key_encoder, - value_serializer=value_serializer, - value_deserializer=value_deserializer, - ) - - # Use the encoder-backed store methods - store.mset([(1, 3.14), (2, 2.718)]) - values = store.mget([1, 2]) # Retrieves [3.14, 2.718] - store.mdelete([1, 2]) # Deletes the keys 1 and 2 - ``` - """ - - def __init__( - self, - store: BaseStore[str, Any], - key_encoder: Callable[[K], str], - value_serializer: Callable[[V], bytes], - value_deserializer: Callable[[Any], V], - ) -> None: - """Initialize an EncodedStore.""" - self.store = store - self.key_encoder = key_encoder - self.value_serializer = value_serializer - self.value_deserializer = value_deserializer - - def mget(self, keys: Sequence[K]) -> list[V | None]: - """Get the values associated with the given keys.""" - encoded_keys: list[str] = [self.key_encoder(key) for key in keys] - values = self.store.mget(encoded_keys) - return [self.value_deserializer(value) if value is not None else value for value in values] - - async def amget(self, keys: Sequence[K]) -> list[V | None]: - """Get the values associated with the given keys.""" - encoded_keys: list[str] = [self.key_encoder(key) for key in keys] - values = await self.store.amget(encoded_keys) - return [self.value_deserializer(value) if value is not None else value for value in values] - - def mset(self, key_value_pairs: Sequence[tuple[K, V]]) -> None: - """Set the values for the given keys.""" - encoded_pairs = [ - (self.key_encoder(key), self.value_serializer(value)) for key, value in key_value_pairs - ] - self.store.mset(encoded_pairs) - - async def amset(self, key_value_pairs: Sequence[tuple[K, V]]) -> None: - """Set the values for the given keys.""" - encoded_pairs = [ - (self.key_encoder(key), self.value_serializer(value)) for key, value in key_value_pairs - ] - await self.store.amset(encoded_pairs) - - def mdelete(self, keys: Sequence[K]) -> None: - """Delete the given keys and their associated values.""" - encoded_keys = [self.key_encoder(key) for key in keys] - self.store.mdelete(encoded_keys) - - async def amdelete(self, keys: Sequence[K]) -> None: - """Delete the given keys and their associated values.""" - encoded_keys = [self.key_encoder(key) for key in keys] - await self.store.amdelete(encoded_keys) - - def yield_keys( - self, - *, - prefix: str | None = None, - ) -> Iterator[K] | Iterator[str]: - """Get an iterator over keys that match the given prefix.""" - # For the time being this does not return K, but str - # it's for debugging purposes. Should fix this. - yield from self.store.yield_keys(prefix=prefix) - - async def ayield_keys( - self, - *, - prefix: str | None = None, - ) -> AsyncIterator[K] | AsyncIterator[str]: - """Get an iterator over keys that match the given prefix.""" - # For the time being this does not return K, but str - # it's for debugging purposes. Should fix this. 
- async for key in self.store.ayield_keys(prefix=prefix): - yield key diff --git a/libs/langchain_v1/langchain/storage/exceptions.py b/libs/langchain_v1/langchain/storage/exceptions.py deleted file mode 100644 index 74d2a43c531..00000000000 --- a/libs/langchain_v1/langchain/storage/exceptions.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Store exceptions.""" - -from langchain_core.stores import InvalidKeyException - -__all__ = ["InvalidKeyException"] diff --git a/libs/langchain_v1/langchain/storage/in_memory.py b/libs/langchain_v1/langchain/storage/in_memory.py deleted file mode 100644 index 296bc19a02d..00000000000 --- a/libs/langchain_v1/langchain/storage/in_memory.py +++ /dev/null @@ -1,13 +0,0 @@ -"""In memory store that is not thread safe and has no eviction policy. - -This is a simple implementation of the BaseStore using a dictionary that is useful -primarily for unit testing purposes. -""" - -from langchain_core.stores import InMemoryBaseStore, InMemoryByteStore, InMemoryStore - -__all__ = [ - "InMemoryBaseStore", - "InMemoryByteStore", - "InMemoryStore", -] diff --git a/libs/langchain_v1/langchain/tools/__init__.py b/libs/langchain_v1/langchain/tools/__init__.py index 8f1fa554f53..92aca31aec1 100644 --- a/libs/langchain_v1/langchain/tools/__init__.py +++ b/libs/langchain_v1/langchain/tools/__init__.py @@ -1,4 +1,10 @@ -"""Tools.""" +"""Tools. + +!!! warning "Reference docs" + This page contains **reference documentation** for Tools. See + [the docs](https://docs.langchain.com/oss/python/langchain/tools) for conceptual + guides, tutorials, and examples on using Tools. +""" from langchain_core.tools import ( BaseTool, @@ -8,11 +14,7 @@ from langchain_core.tools import ( tool, ) -from langchain.tools.tool_node import ( - InjectedState, - InjectedStore, - ToolNode, -) +from langchain.tools.tool_node import InjectedState, InjectedStore, ToolRuntime __all__ = [ "BaseTool", @@ -21,6 +23,6 @@ __all__ = [ "InjectedToolArg", "InjectedToolCallId", "ToolException", - "ToolNode", + "ToolRuntime", "tool", ] diff --git a/libs/langchain_v1/langchain/tools/tool_node.py b/libs/langchain_v1/langchain/tools/tool_node.py index d856aef94a1..4474c8ba935 100644 --- a/libs/langchain_v1/langchain/tools/tool_node.py +++ b/libs/langchain_v1/langchain/tools/tool_node.py @@ -1,1487 +1,20 @@ -"""Tool execution node for LangGraph workflows. +"""Utils file included for backwards compat imports.""" -This module provides prebuilt functionality for executing tools in LangGraph. - -Tools are functions that models can call to interact with external systems, -APIs, databases, or perform computations. 
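With this rewrite, `langchain.tools.tool_node` becomes a thin backwards-compatibility shim over `langgraph.prebuilt`, and `langchain.tools` exports `ToolRuntime` in place of `ToolNode`. A minimal sketch of the import surface that keeps working (assumes `langgraph` is installed; the tool itself is illustrative, not part of this diff):

```python
from typing import Annotated

# Re-exported through the compatibility shim / updated __all__ in this diff.
from langchain.tools import InjectedState, ToolRuntime, tool  # noqa: F401


@tool
def summarize_history(query: str, state: Annotated[dict, InjectedState]) -> str:
    """Summarize the conversation so far."""
    # InjectedState arguments are supplied by the framework, not by the model.
    return f"Query: {query!r}; messages so far: {len(state['messages'])}"


print(summarize_history.name)  # "summarize_history"
```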
- -The module implements design patterns for: -- Parallel execution of multiple tool calls for efficiency -- Robust error handling with customizable error messages -- State injection for tools that need access to graph state -- Store injection for tools that need persistent storage -- Command-based state updates for advanced control flow - -Key Components: - ToolNode: Main class for executing tools in LangGraph workflows - InjectedState: Annotation for injecting graph state into tools - InjectedStore: Annotation for injecting persistent store into tools - tools_condition: Utility function for conditional routing based on tool calls - -Typical Usage: - ```python - from langchain_core.tools import tool - from langchain.tools import ToolNode - - - @tool - def my_tool(x: int) -> str: - return f"Result: {x}" - - - tool_node = ToolNode([my_tool]) - ``` -""" - -from __future__ import annotations - -import asyncio -import inspect -import json -from collections.abc import Callable -from copy import copy, deepcopy -from dataclasses import dataclass, replace -from types import UnionType -from typing import ( - TYPE_CHECKING, - Annotated, - Any, - Literal, - Optional, - TypedDict, - Union, - cast, - get_args, - get_origin, - get_type_hints, +from langgraph.prebuilt import InjectedState, InjectedStore, ToolRuntime +from langgraph.prebuilt.tool_node import ( + ToolCallRequest, + ToolCallWithContext, + ToolCallWrapper, +) +from langgraph.prebuilt.tool_node import ( + ToolNode as _ToolNode, # noqa: F401 ) -from langchain_core.messages import ( - AIMessage, - AnyMessage, - RemoveMessage, - ToolCall, - ToolMessage, - convert_to_messages, -) -from langchain_core.runnables.config import ( - get_config_list, - get_executor_for_config, -) -from langchain_core.tools import BaseTool, InjectedToolArg -from langchain_core.tools import tool as create_tool -from langchain_core.tools.base import ( - TOOL_MESSAGE_BLOCK_TYPES, - get_all_basemodel_annotations, -) -from langgraph._internal._runnable import RunnableCallable -from langgraph.errors import GraphBubbleUp -from langgraph.graph.message import REMOVE_ALL_MESSAGES -from langgraph.runtime import get_runtime -from langgraph.types import Command, Send -from pydantic import BaseModel, ValidationError - -if TYPE_CHECKING: - from collections.abc import Sequence - - from langchain_core.runnables import RunnableConfig - from langgraph.store.base import BaseStore - -INVALID_TOOL_NAME_ERROR_TEMPLATE = ( - "Error: {requested_tool} is not a valid tool, try one of [{available_tools}]." -) -TOOL_CALL_ERROR_TEMPLATE = "Error: {error}\n Please fix your mistakes." -TOOL_EXECUTION_ERROR_TEMPLATE = ( - "Error executing tool '{tool_name}' with kwargs {tool_kwargs} with error:\n" - " {error}\n" - " Please fix the error and try again." -) -TOOL_INVOCATION_ERROR_TEMPLATE = ( - "Error invoking tool '{tool_name}' with kwargs {tool_kwargs} with error:\n" - " {error}\n" - " Please fix the error and try again." -) - - -@dataclass() -class ToolCallRequest: - """Tool execution request passed to tool call interceptors. - - Attributes: - tool_call: Tool call dict with name, args, and id from model output. - tool: BaseTool instance to be invoked. - state: Agent state (dict, list, or BaseModel). - runtime: LangGraph runtime context (optional, None if outside graph). 
- """ - - tool_call: ToolCall - tool: BaseTool - state: Any - runtime: Any - - -ToolCallHandler = Callable[ - [ToolCallRequest, Callable[[ToolCallRequest], ToolMessage | Command]], - ToolMessage | Command, +__all__ = [ + "InjectedState", + "InjectedStore", + "ToolCallRequest", + "ToolCallWithContext", + "ToolCallWrapper", + "ToolRuntime", ] -"""Handler-based tool call interceptor with multi-call support. - -Handler receives: - request: ToolCallRequest with tool_call, tool, state, and runtime. - execute: Callable to execute the tool (CAN BE CALLED MULTIPLE TIMES). - -Returns: - ToolMessage or Command (the final result). - -The execute callable can be invoked multiple times for retry logic, -with potentially modified requests each time. Each call to execute -is independent and stateless. - -Note: - When implementing middleware for `create_agent`, use - `AgentMiddleware.wrap_tool_call` which provides properly typed - state parameter for better type safety. - -Examples: - Passthrough (execute once): - - def handler(request, execute): - return execute(request) - - Modify request before execution: - - def handler(request, execute): - request.tool_call["args"]["value"] *= 2 - return execute(request) - - Retry on error (execute multiple times): - - def handler(request, execute): - for attempt in range(3): - try: - result = execute(request) - if is_valid(result): - return result - except Exception: - if attempt == 2: - raise - return result - - Conditional retry based on response: - - def handler(request, execute): - for attempt in range(3): - result = execute(request) - if isinstance(result, ToolMessage) and result.status != "error": - return result - if attempt < 2: - continue - return result - - Cache/short-circuit without calling execute: - - def handler(request, execute): - if cached := get_cache(request): - return ToolMessage(content=cached, tool_call_id=request.tool_call["id"]) - result = execute(request) - save_cache(request, result) - return result -""" - - -class ToolCallWithContext(TypedDict): - """ToolCall with additional context for graph state. - - This is an internal data structure meant to help the ToolNode accept - tool calls with additional context (e.g. state) when dispatched using the - Send API. - - The Send API is used in create_agent to distribute tool calls in parallel - and support human-in-the-loop workflows where graph execution may be paused - for an indefinite time. - """ - - tool_call: ToolCall - __type: Literal["tool_call_with_context"] - """Type to parameterize the payload. - - Using "__" as a prefix to be defensive against potential name collisions with - regular user state. - """ - state: Any - """The state is provided as additional context.""" - - -def msg_content_output(output: Any) -> str | list[dict]: - """Convert tool output to ToolMessage content format. - - Handles str, list[dict] (content blocks), and arbitrary objects by attempting - JSON serialization with fallback to str(). - - Args: - output: Tool execution output of any type. - - Returns: - String or list of content blocks suitable for ToolMessage.content. - """ - if isinstance(output, str) or ( - isinstance(output, list) - and all(isinstance(x, dict) and x.get("type") in TOOL_MESSAGE_BLOCK_TYPES for x in output) - ): - return output - # Technically a list of strings is also valid message content, but it's - # not currently well tested that all chat models support this. - # And for backwards compatibility we want to make sure we don't break - # any existing ToolNode usage. 
- try: - return json.dumps(output, ensure_ascii=False) - except Exception: # noqa: BLE001 - return str(output) - - -class ToolInvocationError(Exception): - """Exception raised when a tool invocation fails due to invalid arguments.""" - - def __init__( - self, tool_name: str, source: ValidationError, tool_kwargs: dict[str, Any] - ) -> None: - """Initialize the ToolInvocationError. - - Args: - tool_name: The name of the tool that failed. - source: The exception that occurred. - tool_kwargs: The keyword arguments that were passed to the tool. - """ - self.message = TOOL_INVOCATION_ERROR_TEMPLATE.format( - tool_name=tool_name, tool_kwargs=tool_kwargs, error=source - ) - self.tool_name = tool_name - self.tool_kwargs = tool_kwargs - self.source = source - super().__init__(self.message) - - -def _default_handle_tool_errors(e: Exception) -> str: - """Default error handler for tool errors. - - If the tool is a tool invocation error, return its message. - Otherwise, raise the error. - """ - if isinstance(e, ToolInvocationError): - return e.message - raise e - - -def _handle_tool_error( - e: Exception, - *, - flag: bool | str | Callable[..., str] | type[Exception] | tuple[type[Exception], ...], -) -> str: - """Generate error message content based on exception handling configuration. - - This function centralizes error message generation logic, supporting different - error handling strategies configured via the ToolNode's handle_tool_errors - parameter. - - Args: - e: The exception that occurred during tool execution. - flag: Configuration for how to handle the error. Can be: - - bool: If `True`, use default error template - - str: Use this string as the error message - - Callable: Call this function with the exception to get error message - - tuple: Not used in this context (handled by caller) - - Returns: - A string containing the error message to include in the ToolMessage. - - Raises: - ValueError: If flag is not one of the supported types. - - Note: - The tuple case is handled by the caller through exception type checking, - not by this function directly. - """ - if isinstance(flag, (bool, tuple)) or (isinstance(flag, type) and issubclass(flag, Exception)): - content = TOOL_CALL_ERROR_TEMPLATE.format(error=repr(e)) - elif isinstance(flag, str): - content = flag - elif callable(flag): - content = flag(e) # type: ignore [assignment, call-arg] - else: - msg = ( - f"Got unexpected type of `handle_tool_error`. Expected bool, str " - f"or callable. Received: {flag}" - ) - raise ValueError(msg) - return content - - -def _infer_handled_types(handler: Callable[..., str]) -> tuple[type[Exception], ...]: - """Infer exception types handled by a custom error handler function. - - This function analyzes the type annotations of a custom error handler to determine - which exception types it's designed to handle. This enables type-safe error handling - where only specific exceptions are caught and processed by the handler. - - Args: - handler: A callable that takes an exception and returns an error message string. - The first parameter (after self/cls if present) should be type-annotated - with the exception type(s) to handle. - - Returns: - A tuple of exception types that the handler can process. Returns (Exception,) - if no specific type information is available for backward compatibility. - - Raises: - ValueError: If the handler's annotation contains non-Exception types or - if Union types contain non-Exception types. 
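The annotation inspection that `_infer_handled_types` performs can be isolated into a short standard-library sketch (simplified: it only handles the `typing.Union` spelling, not the `X | Y` form, and the handler name is illustrative):

```python
import inspect
from collections.abc import Callable
from typing import Union, get_args, get_origin, get_type_hints


def handle_bad_input(e: Union[ValueError, KeyError]) -> str:
    """Custom handler: only these exception types should be caught."""
    return f"Recoverable tool error: {e!r}"


def handled_exception_types(handler: Callable[..., str]) -> tuple[type[Exception], ...]:
    # Look at the first parameter's annotation to decide which exceptions to catch.
    first_param = next(iter(inspect.signature(handler).parameters))
    annotation = get_type_hints(handler).get(first_param, Exception)
    if get_origin(annotation) is Union:
        return get_args(annotation)
    return (annotation,)


print(handled_exception_types(handle_bad_input))  # (ValueError, KeyError)
```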
- - Note: - This function supports both single exception types and Union types for - handlers that need to handle multiple exception types differently. - """ - sig = inspect.signature(handler) - params = list(sig.parameters.values()) - if params: - # If it's a method, the first argument is typically 'self' or 'cls' - if params[0].name in ["self", "cls"] and len(params) == 2: - first_param = params[1] - else: - first_param = params[0] - - type_hints = get_type_hints(handler) - if first_param.name in type_hints: - origin = get_origin(first_param.annotation) - if origin in [Union, UnionType]: - args = get_args(first_param.annotation) - if all(issubclass(arg, Exception) for arg in args): - return tuple(args) - msg = ( - "All types in the error handler error annotation must be " - "Exception types. For example, " - "`def custom_handler(e: Union[ValueError, TypeError])`. " - f"Got '{first_param.annotation}' instead." - ) - raise ValueError(msg) - - exception_type = type_hints[first_param.name] - if Exception in exception_type.__mro__: - return (exception_type,) - msg = ( - f"Arbitrary types are not supported in the error handler " - f"signature. Please annotate the error with either a " - f"specific Exception type or a union of Exception types. " - "For example, `def custom_handler(e: ValueError)` or " - "`def custom_handler(e: Union[ValueError, TypeError])`. " - f"Got '{exception_type}' instead." - ) - raise ValueError(msg) - - # If no type information is available, return (Exception,) - # for backwards compatibility. - return (Exception,) - - -class ToolNode(RunnableCallable): - """A node for executing tools in LangGraph workflows. - - Handles tool execution patterns including function calls, state injection, - persistent storage, and control flow. Manages parallel execution, - error handling. - - Input Formats: - 1. Graph state with `messages` key that has a list of messages: - - Common representation for agentic workflows - - Supports custom messages key via `messages_key` parameter - - 2. **Message List**: `[AIMessage(..., tool_calls=[...])]` - - List of messages with tool calls in the last AIMessage - - 3. **Direct Tool Calls**: `[{"name": "tool", "args": {...}, "id": "1", "type": "tool_call"}]` - - Bypasses message parsing for direct tool execution - - For programmatic tool invocation and testing - - Output Formats: - Output format depends on input type and tool behavior: - - **For Regular tools**: - - Dict input β†’ `{"messages": [ToolMessage(...)]}` - - List input β†’ `[ToolMessage(...)]` - - **For Command tools**: - - Returns `[Command(...)]` or mixed list with regular tool outputs - - Commands can update state, trigger navigation, or send messages - - Args: - tools: A sequence of tools that can be invoked by this node. Supports: - - **BaseTool instances**: Tools with schemas and metadata - - **Plain functions**: Automatically converted to tools with inferred schemas - name: The name identifier for this node in the graph. Used for debugging - and visualization. Defaults to "tools". - tags: Optional metadata tags to associate with the node for filtering - and organization. Defaults to `None`. - handle_tool_errors: Configuration for error handling during tool execution. - Supports multiple strategies: - - - **True**: Catch all errors and return a ToolMessage with the default - error template containing the exception details. - - **str**: Catch all errors and return a ToolMessage with this custom - error message string. 
- - **type[Exception]**: Only catch exceptions with the specified type and - return the default error message for it. - - **tuple[type[Exception], ...]**: Only catch exceptions with the specified - types and return default error messages for them. - - **Callable[..., str]**: Catch exceptions matching the callable's signature - and return the string result of calling it with the exception. - - **False**: Disable error handling entirely, allowing exceptions to - propagate. - - Defaults to a callable that: - - catches tool invocation errors (due to invalid arguments provided by the model) and returns a descriptive error message - - ignores tool execution errors (they will be re-raised) - - messages_key: The key in the state dictionary that contains the message list. - This same key will be used for the output `ToolMessage` objects. - Defaults to "messages". - Allows custom state schemas with different message field names. - - Examples: - Basic usage: - - ```python - from langchain.tools import ToolNode - from langchain_core.tools import tool - - @tool - def calculator(a: int, b: int) -> int: - \"\"\"Add two numbers.\"\"\" - return a + b - - tool_node = ToolNode([calculator]) - ``` - - State injection: - - ```python - from typing_extensions import Annotated - from langchain.tools import InjectedState - - @tool - def context_tool(query: str, state: Annotated[dict, InjectedState]) -> str: - \"\"\"Some tool that uses state.\"\"\" - return f"Query: {query}, Messages: {len(state['messages'])}" - - tool_node = ToolNode([context_tool]) - ``` - - Error handling: - - ```python - def handle_errors(e: ValueError) -> str: - return "Invalid input provided" - - - tool_node = ToolNode([my_tool], handle_tool_errors=handle_errors) - ``` - """ # noqa: E501 - - name: str = "tools" - - def __init__( - self, - tools: Sequence[BaseTool | Callable], - *, - name: str = "tools", - tags: list[str] | None = None, - handle_tool_errors: bool - | str - | Callable[..., str] - | type[Exception] - | tuple[type[Exception], ...] = _default_handle_tool_errors, - messages_key: str = "messages", - on_tool_call: ToolCallHandler | None = None, - ) -> None: - """Initialize ToolNode with tools and configuration. - - Args: - tools: Sequence of tools to make available for execution. - name: Node name for graph identification. - tags: Optional metadata tags. - handle_tool_errors: Error handling configuration. - messages_key: State key containing messages. - on_tool_call: Generator handler to intercept tool execution. Receives - ToolCallRequest, yields requests, messages, or Commands; receives - ToolMessage or Command via .send(). Final result is last value sent to - handler. Enables retries, caching, request modification, and control flow. 
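The handler contract documented above for `on_tool_call` (receive a request plus an `execute` callable, return the final `ToolMessage` or `Command`, and optionally call `execute` more than once) can be made concrete with a self-contained retry sketch. The three-attempt policy is arbitrary, and the maintained equivalent of this hook now lives in `langgraph.prebuilt` (`ToolCallWrapper`) per the shim earlier in this diff:

```python
from langchain_core.messages import ToolMessage


def retry_on_error_handler(request, execute):
    """Retry a failing tool call up to three times before giving up."""
    result = execute(request)  # each call to execute is independent and stateless
    for _ in range(2):
        if not (isinstance(result, ToolMessage) and result.status == "error"):
            break  # success (or a Command): return it as-is
        result = execute(request)  # transient failure: re-run the same request
    return result
```

With the removed class this would be wired up as `ToolNode([...], on_tool_call=retry_on_error_handler)`.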
- """ - super().__init__(self._func, self._afunc, name=name, tags=tags, trace=False) - self._tools_by_name: dict[str, BaseTool] = {} - self._tool_to_state_args: dict[str, dict[str, str | None]] = {} - self._tool_to_store_arg: dict[str, str | None] = {} - self._handle_tool_errors = handle_tool_errors - self._messages_key = messages_key - self._on_tool_call = on_tool_call - for tool in tools: - if not isinstance(tool, BaseTool): - tool_ = create_tool(cast("type[BaseTool]", tool)) - else: - tool_ = tool - self._tools_by_name[tool_.name] = tool_ - self._tool_to_state_args[tool_.name] = _get_state_args(tool_) - self._tool_to_store_arg[tool_.name] = _get_store_arg(tool_) - - @property - def tools_by_name(self) -> dict[str, BaseTool]: - """Mapping from tool name to BaseTool instance.""" - return self._tools_by_name - - def _func( - self, - input: list[AnyMessage] | dict[str, Any] | BaseModel, - config: RunnableConfig, - *, - store: Optional[BaseStore], # noqa: UP045 - ) -> Any: - try: - runtime = get_runtime() - except RuntimeError: - # Running outside of LangGraph runtime context (e.g., unit tests) - runtime = None - - tool_calls, input_type = self._parse_input(input) - tool_calls = [self._inject_tool_args(call, input, store) for call in tool_calls] - - config_list = get_config_list(config, len(tool_calls)) - input_types = [input_type] * len(tool_calls) - inputs = [input] * len(tool_calls) - runtimes = [runtime] * len(tool_calls) - with get_executor_for_config(config) as executor: - outputs = [ - *executor.map(self._run_one, tool_calls, input_types, config_list, inputs, runtimes) - ] - - return self._combine_tool_outputs(outputs, input_type) - - async def _afunc( - self, - input: list[AnyMessage] | dict[str, Any] | BaseModel, - config: RunnableConfig, - *, - store: Optional[BaseStore], # noqa: UP045 - ) -> Any: - try: - runtime = get_runtime() - except RuntimeError: - # Running outside of LangGraph runtime context (e.g., unit tests) - runtime = None - - tool_calls, input_type = self._parse_input(input) - tool_calls = [self._inject_tool_args(call, input, store) for call in tool_calls] - outputs = await asyncio.gather( - *(self._arun_one(call, input_type, config, input, runtime) for call in tool_calls) - ) - - return self._combine_tool_outputs(outputs, input_type) - - def _combine_tool_outputs( - self, - outputs: list[ToolMessage | Command], - input_type: Literal["list", "dict", "tool_calls"], - ) -> list[Command | list[ToolMessage] | dict[str, list[ToolMessage]]]: - # preserve existing behavior for non-command tool outputs for backwards - # compatibility - if not any(isinstance(output, Command) for output in outputs): - # TypedDict, pydantic, dataclass, etc. 
should all be able to load from dict - return outputs if input_type == "list" else {self._messages_key: outputs} # type: ignore[return-value, return-value] - - # LangGraph will automatically handle list of Command and non-command node - # updates - combined_outputs: list[Command | list[ToolMessage] | dict[str, list[ToolMessage]]] = [] - - # combine all parent commands with goto into a single parent command - parent_command: Command | None = None - for output in outputs: - if isinstance(output, Command): - if ( - output.graph is Command.PARENT - and isinstance(output.goto, list) - and all(isinstance(send, Send) for send in output.goto) - ): - if parent_command: - parent_command = replace( - parent_command, - goto=cast("list[Send]", parent_command.goto) + output.goto, - ) - else: - parent_command = Command(graph=Command.PARENT, goto=output.goto) - else: - combined_outputs.append(output) - else: - combined_outputs.append( - [output] if input_type == "list" else {self._messages_key: [output]} - ) - - if parent_command: - combined_outputs.append(parent_command) - return combined_outputs - - def _execute_tool_sync( - self, - request: ToolCallRequest, - input_type: Literal["list", "dict", "tool_calls"], - config: RunnableConfig, - ) -> ToolMessage | Command: - """Execute tool call with configured error handling. - - Args: - request: Tool execution request. - input_type: Input format. - config: Runnable configuration. - - Returns: - ToolMessage or Command. - - Raises: - Exception: If tool fails and handle_tool_errors is False. - """ - call = request.tool_call - tool = request.tool - call_args = {**call, "type": "tool_call"} - - try: - try: - response = tool.invoke(call_args, config) - except ValidationError as exc: - raise ToolInvocationError(call["name"], exc, call["args"]) from exc - - # GraphInterrupt is a special exception that will always be raised. - # It can be triggered in the following scenarios, - # Where GraphInterrupt(GraphBubbleUp) is raised from an `interrupt` invocation - # most commonly: - # (1) a GraphInterrupt is raised inside a tool - # (2) a GraphInterrupt is raised inside a graph node for a graph called as a tool - # (3) a GraphInterrupt is raised when a subgraph is interrupted inside a graph - # called as a tool - # (2 and 3 can happen in a "supervisor w/ tools" multi-agent architecture) - except GraphBubbleUp: - raise - except Exception as e: - # Determine which exception types are handled - handled_types: tuple[type[Exception], ...] 
- if isinstance(self._handle_tool_errors, type) and issubclass( - self._handle_tool_errors, Exception - ): - handled_types = (self._handle_tool_errors,) - elif isinstance(self._handle_tool_errors, tuple): - handled_types = self._handle_tool_errors - elif callable(self._handle_tool_errors) and not isinstance( - self._handle_tool_errors, type - ): - handled_types = _infer_handled_types(self._handle_tool_errors) - else: - # default behavior is catching all exceptions - handled_types = (Exception,) - - # Check if this error should be handled - if not self._handle_tool_errors or not isinstance(e, handled_types): - raise - - # Error is handled - create error ToolMessage - content = _handle_tool_error(e, flag=self._handle_tool_errors) - return ToolMessage( - content=content, - name=call["name"], - tool_call_id=call["id"], - status="error", - ) - - # Process successful response - if isinstance(response, Command): - # Validate Command before returning to handler - return self._validate_tool_command(response, request.tool_call, input_type) - if isinstance(response, ToolMessage): - response.content = cast("str | list", msg_content_output(response.content)) - return response - - msg = f"Tool {call['name']} returned unexpected type: {type(response)}" - raise TypeError(msg) - - def _run_one( - self, - call: ToolCall, - input_type: Literal["list", "dict", "tool_calls"], - config: RunnableConfig, - input: list[AnyMessage] | dict[str, Any] | BaseModel, - runtime: Any, - ) -> ToolMessage | Command: - """Execute single tool call with on_tool_call handler if configured. - - Args: - call: Tool call dict. - input_type: Input format. - config: Runnable configuration. - input: Agent state. - runtime: LangGraph runtime or None. - - Returns: - ToolMessage or Command. - """ - if invalid_tool_message := self._validate_tool_call(call): - return invalid_tool_message - - tool = self.tools_by_name[call["name"]] - - # Extract state from ToolCallWithContext if present - state = self._extract_state(input) - - # Create the tool request with state and runtime - tool_request = ToolCallRequest( - tool_call=call, - tool=tool, - state=state, - runtime=runtime, - ) - - if self._on_tool_call is None: - # No handler - execute directly - return self._execute_tool_sync(tool_request, input_type, config) - - # Define execute callable that can be called multiple times - def execute(req: ToolCallRequest) -> ToolMessage | Command: - """Execute tool with given request. Can be called multiple times.""" - return self._execute_tool_sync(req, input_type, config) - - # Call handler with request and execute callable - try: - return self._on_tool_call(tool_request, execute) - except Exception as e: - # Handler threw an exception - if not self._handle_tool_errors: - raise - # Convert to error message - content = _handle_tool_error(e, flag=self._handle_tool_errors) - return ToolMessage( - content=content, - name=tool_request.tool_call["name"], - tool_call_id=tool_request.tool_call["id"], - status="error", - ) - - async def _execute_tool_async( - self, - request: ToolCallRequest, - input_type: Literal["list", "dict", "tool_calls"], - config: RunnableConfig, - ) -> ToolMessage | Command: - """Execute tool call asynchronously with configured error handling. - - Args: - request: Tool execution request. - input_type: Input format. - config: Runnable configuration. - - Returns: - ToolMessage or Command. - - Raises: - Exception: If tool fails and handle_tool_errors is False. 
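A minimal sketch of how the `handled_types` dispatch above plays out for different `handle_tool_errors` values. Import paths follow the class docstring; `flaky` is a hypothetical tool introduced only for illustration, and the comments restate the documented semantics rather than captured output.

```python
from langchain_core.tools import tool

from langchain.tools import ToolNode


@tool
def flaky(x: int) -> str:
    """A tool that always fails."""
    raise ValueError("flaky cannot handle this input")


# handled_types == (Exception,): every failure becomes an error ToolMessage.
catch_all = ToolNode([flaky], handle_tool_errors=True)

# handled_types == (ValueError,): other exception types propagate to the caller.
catch_value_errors = ToolNode([flaky], handle_tool_errors=(ValueError,))


def describe(e: ValueError) -> str:
    # handled_types is inferred from this annotation via _infer_handled_types.
    return f"flaky rejected the arguments: {e}"


catch_with_handler = ToolNode([flaky], handle_tool_errors=describe)

# No handling at all: the ValueError is re-raised to the caller.
unhandled = ToolNode([flaky], handle_tool_errors=False)
```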
- """ - call = request.tool_call - tool = request.tool - call_args = {**call, "type": "tool_call"} - - try: - try: - response = await tool.ainvoke(call_args, config) - except ValidationError as exc: - raise ToolInvocationError(call["name"], exc, call["args"]) from exc - - # GraphInterrupt is a special exception that will always be raised. - # It can be triggered in the following scenarios, - # Where GraphInterrupt(GraphBubbleUp) is raised from an `interrupt` invocation - # most commonly: - # (1) a GraphInterrupt is raised inside a tool - # (2) a GraphInterrupt is raised inside a graph node for a graph called as a tool - # (3) a GraphInterrupt is raised when a subgraph is interrupted inside a graph - # called as a tool - # (2 and 3 can happen in a "supervisor w/ tools" multi-agent architecture) - except GraphBubbleUp: - raise - except Exception as e: - # Determine which exception types are handled - handled_types: tuple[type[Exception], ...] - if isinstance(self._handle_tool_errors, type) and issubclass( - self._handle_tool_errors, Exception - ): - handled_types = (self._handle_tool_errors,) - elif isinstance(self._handle_tool_errors, tuple): - handled_types = self._handle_tool_errors - elif callable(self._handle_tool_errors) and not isinstance( - self._handle_tool_errors, type - ): - handled_types = _infer_handled_types(self._handle_tool_errors) - else: - # default behavior is catching all exceptions - handled_types = (Exception,) - - # Check if this error should be handled - if not self._handle_tool_errors or not isinstance(e, handled_types): - raise - - # Error is handled - create error ToolMessage - content = _handle_tool_error(e, flag=self._handle_tool_errors) - return ToolMessage( - content=content, - name=call["name"], - tool_call_id=call["id"], - status="error", - ) - - # Process successful response - if isinstance(response, Command): - # Validate Command before returning to handler - return self._validate_tool_command(response, request.tool_call, input_type) - if isinstance(response, ToolMessage): - response.content = cast("str | list", msg_content_output(response.content)) - return response - - msg = f"Tool {call['name']} returned unexpected type: {type(response)}" - raise TypeError(msg) - - async def _arun_one( - self, - call: ToolCall, - input_type: Literal["list", "dict", "tool_calls"], - config: RunnableConfig, - input: list[AnyMessage] | dict[str, Any] | BaseModel, - runtime: Any, - ) -> ToolMessage | Command: - """Execute single tool call asynchronously with on_tool_call handler if configured. - - Args: - call: Tool call dict. - input_type: Input format. - config: Runnable configuration. - input: Agent state. - runtime: LangGraph runtime or None. - - Returns: - ToolMessage or Command. - """ - if invalid_tool_message := self._validate_tool_call(call): - return invalid_tool_message - - tool = self.tools_by_name[call["name"]] - - # Extract state from ToolCallWithContext if present - state = self._extract_state(input) - - # Create the tool request with state and runtime - tool_request = ToolCallRequest( - tool_call=call, - tool=tool, - state=state, - runtime=runtime, - ) - - if self._on_tool_call is None: - # No handler - execute directly - return await self._execute_tool_async(tool_request, input_type, config) - - # Define async execute callable that can be called multiple times - async def execute(req: ToolCallRequest) -> ToolMessage | Command: - """Execute tool with given request. 
Can be called multiple times.""" - return await self._execute_tool_async(req, input_type, config) - - # Call handler with request and execute callable - # Note: handler is sync, but execute callable is async - try: - result = self._on_tool_call(tool_request, execute) # type: ignore[arg-type] - # If result is a coroutine, await it (though handler should be sync) - return await result if hasattr(result, "__await__") else result - except Exception as e: - # Handler threw an exception - if not self._handle_tool_errors: - raise - # Convert to error message - content = _handle_tool_error(e, flag=self._handle_tool_errors) - return ToolMessage( - content=content, - name=tool_request.tool_call["name"], - tool_call_id=tool_request.tool_call["id"], - status="error", - ) - - def _parse_input( - self, - input: list[AnyMessage] | dict[str, Any] | BaseModel, - ) -> tuple[list[ToolCall], Literal["list", "dict", "tool_calls"]]: - input_type: Literal["list", "dict", "tool_calls"] - if isinstance(input, list): - if isinstance(input[-1], dict) and input[-1].get("type") == "tool_call": - input_type = "tool_calls" - tool_calls = cast("list[ToolCall]", input) - return tool_calls, input_type - input_type = "list" - messages = input - elif isinstance(input, dict) and input.get("__type") == "tool_call_with_context": - # Handle ToolCallWithContext from Send API - # mypy will not be able to type narrow correctly since the signature - # for input contains dict[str, Any]. We'd need to narrow dict[str, Any] - # before we can apply correct typing. - input_with_ctx = cast("ToolCallWithContext", input) - input_type = "tool_calls" - return [input_with_ctx["tool_call"]], input_type - elif isinstance(input, dict) and (messages := input.get(self._messages_key, [])): - input_type = "dict" - elif messages := getattr(input, self._messages_key, []): - # Assume dataclass-like state that can coerce from dict - input_type = "dict" - else: - msg = "No message found in input" - raise ValueError(msg) - - try: - latest_ai_message = next(m for m in reversed(messages) if isinstance(m, AIMessage)) - except StopIteration: - msg = "No AIMessage found in input" - raise ValueError(msg) - - tool_calls = list(latest_ai_message.tool_calls) - return tool_calls, input_type - - def _validate_tool_call(self, call: ToolCall) -> ToolMessage | None: - requested_tool = call["name"] - if requested_tool not in self.tools_by_name: - all_tool_names = list(self.tools_by_name.keys()) - content = INVALID_TOOL_NAME_ERROR_TEMPLATE.format( - requested_tool=requested_tool, - available_tools=", ".join(all_tool_names), - ) - return ToolMessage( - content, name=requested_tool, tool_call_id=call["id"], status="error" - ) - return None - - def _extract_state( - self, input: list[AnyMessage] | dict[str, Any] | BaseModel - ) -> list[AnyMessage] | dict[str, Any] | BaseModel: - """Extract state from input, handling ToolCallWithContext if present. - - Args: - input: The input which may be raw state or ToolCallWithContext. - - Returns: - The actual state to pass to on_tool_call handlers. 
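A short sketch of the three input shapes distinguished by `_parse_input` above, reusing the hypothetical `calculator` tool from the class docstring. As documented there, the output is a bare list of `ToolMessage` objects for list input and a dict keyed by `messages_key` otherwise.

```python
from langchain_core.messages import AIMessage

call = {"name": "calculator", "args": {"a": 1, "b": 2}, "id": "1", "type": "tool_call"}

# 1. Graph state dict containing the messages key -> input_type == "dict"
state_input = {"messages": [AIMessage("", tool_calls=[call])]}

# 2. Bare message list ending in an AIMessage with tool calls -> input_type == "list"
list_input = [AIMessage("", tool_calls=[call])]

# 3. Direct tool-call dicts, bypassing message parsing -> input_type == "tool_calls"
direct_input = [call]
```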
- """ - if isinstance(input, dict) and input.get("__type") == "tool_call_with_context": - return input["state"] - return input - - def _inject_state( - self, - tool_call: ToolCall, - input: list[AnyMessage] | dict[str, Any] | BaseModel, - ) -> ToolCall: - state_args = self._tool_to_state_args[tool_call["name"]] - if state_args and isinstance(input, list): - required_fields = list(state_args.values()) - if ( - len(required_fields) == 1 and required_fields[0] == self._messages_key - ) or required_fields[0] is None: - input = {self._messages_key: input} - else: - err_msg = ( - f"Invalid input to ToolNode. Tool {tool_call['name']} requires " - f"graph state dict as input." - ) - if any(state_field for state_field in state_args.values()): - required_fields_str = ", ".join(f for f in required_fields if f) - err_msg += f" State should contain fields {required_fields_str}." - raise ValueError(err_msg) - - # Extract state from ToolCallWithContext if present - if isinstance(input, dict) and input.get("__type") == "tool_call_with_context": - state = input["state"] - else: - state = input - - if isinstance(state, dict): - tool_state_args = { - tool_arg: state[state_field] if state_field else state - for tool_arg, state_field in state_args.items() - } - else: - tool_state_args = { - tool_arg: getattr(state, state_field) if state_field else state - for tool_arg, state_field in state_args.items() - } - - tool_call["args"] = { - **tool_call["args"], - **tool_state_args, - } - return tool_call - - def _inject_store(self, tool_call: ToolCall, store: BaseStore | None) -> ToolCall: - store_arg = self._tool_to_store_arg[tool_call["name"]] - if not store_arg: - return tool_call - - if store is None: - msg = ( - "Cannot inject store into tools with InjectedStore annotations - " - "please compile your graph with a store." - ) - raise ValueError(msg) - - tool_call["args"] = { - **tool_call["args"], - store_arg: store, - } - return tool_call - - def _inject_tool_args( - self, - tool_call: ToolCall, - input: list[AnyMessage] | dict[str, Any] | BaseModel, - store: BaseStore | None, - ) -> ToolCall: - """Inject graph state and store into tool call arguments. - - This is an internal method that enables tools to access graph context that - should not be controlled by the model. Tools can declare dependencies on graph - state or persistent storage using InjectedState and InjectedStore annotations. - This method automatically identifies these dependencies and injects the - appropriate values. - - The injection process preserves the original tool call structure while adding - the necessary context arguments. This allows tools to be both model-callable - and context-aware without exposing internal state management to the model. - - Args: - tool_call: The tool call dictionary to augment with injected arguments. - Must contain 'name', 'args', 'id', and 'type' fields. - input: The current graph state to inject into tools requiring state access. - Can be a message list, state dictionary, or BaseModel instance. - store: The persistent store instance to inject into tools requiring storage. - Will be None if no store is configured for the graph. - - Returns: - A new ToolCall dictionary with the same structure as the input but with - additional arguments injected based on the tool's annotation requirements. - - Raises: - ValueError: If a tool requires store injection but no store is provided, - or if state injection requirements cannot be satisfied. - - Note: - This method is called automatically during tool execution. 
It should not - be called from outside the ToolNode. - """ - if tool_call["name"] not in self.tools_by_name: - return tool_call - - tool_call_copy: ToolCall = copy(tool_call) - tool_call_with_state = self._inject_state(tool_call_copy, input) - return self._inject_store(tool_call_with_state, store) - - def _validate_tool_command( - self, - command: Command, - call: ToolCall, - input_type: Literal["list", "dict", "tool_calls"], - ) -> Command: - if isinstance(command.update, dict): - # input type is dict when ToolNode is invoked with a dict input - # (e.g. {"messages": [AIMessage(..., tool_calls=[...])]}) - if input_type not in ("dict", "tool_calls"): - msg = ( - "Tools can provide a dict in Command.update only when using dict " - f"with '{self._messages_key}' key as ToolNode input, " - f"got: {command.update} for tool '{call['name']}'" - ) - raise ValueError(msg) - - updated_command = deepcopy(command) - state_update = cast("dict[str, Any]", updated_command.update) or {} - messages_update = state_update.get(self._messages_key, []) - elif isinstance(command.update, list): - # Input type is list when ToolNode is invoked with a list input - # (e.g. [AIMessage(..., tool_calls=[...])]) - if input_type != "list": - msg = ( - "Tools can provide a list of messages in Command.update " - "only when using list of messages as ToolNode input, " - f"got: {command.update} for tool '{call['name']}'" - ) - raise ValueError(msg) - - updated_command = deepcopy(command) - messages_update = updated_command.update - else: - return command - - # convert to message objects if updates are in a dict format - messages_update = convert_to_messages(messages_update) - - # no validation needed if all messages are being removed - if messages_update == [RemoveMessage(id=REMOVE_ALL_MESSAGES)]: - return updated_command - - has_matching_tool_message = False - for message in messages_update: - if not isinstance(message, ToolMessage): - continue - - if message.tool_call_id == call["id"]: - message.name = call["name"] - has_matching_tool_message = True - - # validate that we always have a ToolMessage matching the tool call in - # Command.update if command is sent to the CURRENT graph - if updated_command.graph is None and not has_matching_tool_message: - example_update = ( - '`Command(update={"messages": ' - '[ToolMessage("Success", tool_call_id=tool_call_id), ...]}, ...)`' - if input_type == "dict" - else "`Command(update=" - '[ToolMessage("Success", tool_call_id=tool_call_id), ...], ...)`' - ) - msg = ( - "Expected to have a matching ToolMessage in Command.update " - f"for tool '{call['name']}', got: {messages_update}. " - "Every tool call (LLM requesting to call a tool) " - "in the message history MUST have a corresponding ToolMessage. " - f"You can fix it by modifying the tool to return {example_update}." - ) - raise ValueError(msg) - return updated_command - - -def tools_condition( - state: list[AnyMessage] | dict[str, Any] | BaseModel, - messages_key: str = "messages", -) -> Literal["tools", "__end__"]: - """Conditional routing function for tool-calling workflows. - - This utility function implements the standard conditional logic for ReAct-style - agents: if the last AI message contains tool calls, route to the tool execution - node; otherwise, end the workflow. This pattern is fundamental to most tool-calling - agent architectures. - - The function handles multiple state formats commonly used in LangGraph applications, - making it flexible for different graph designs while maintaining consistent behavior. 
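A hedged sketch of a `Command`-returning tool that satisfies the requirement enforced by `_validate_tool_command` above: when a `Command` targets the current graph, its update must include a `ToolMessage` whose `tool_call_id` matches the originating call. `InjectedToolCallId` comes from `langchain_core.tools`; the `user_name` state key is purely illustrative.

```python
from typing import Annotated

from langchain_core.messages import ToolMessage
from langchain_core.tools import InjectedToolCallId, tool
from langgraph.types import Command


@tool
def set_user_name(name: str, tool_call_id: Annotated[str, InjectedToolCallId]) -> Command:
    """Store the user's name in graph state and acknowledge the tool call."""
    return Command(
        update={
            "user_name": name,
            # The matching ToolMessage keeps the message history consistent.
            "messages": [ToolMessage("Saved.", tool_call_id=tool_call_id)],
        }
    )
```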
- - Args: - state: The current graph state to examine for tool calls. Supported formats: - - Dictionary containing a messages key (for StateGraph) - - BaseModel instance with a messages attribute - messages_key: The key or attribute name containing the message list in the state. - This allows customization for graphs using different state schemas. - Defaults to "messages". - - Returns: - Either "tools" if tool calls are present in the last AI message, or "__end__" - to terminate the workflow. These are the standard routing destinations for - tool-calling conditional edges. - - Raises: - ValueError: If no messages can be found in the provided state format. - - Example: - Basic usage in a ReAct agent: - - ```python - from langgraph.graph import StateGraph - from langchain.tools import ToolNode - from langchain.tools.tool_node import tools_condition - from typing_extensions import TypedDict - - - class State(TypedDict): - messages: list - - - graph = StateGraph(State) - graph.add_node("llm", call_model) - graph.add_node("tools", ToolNode([my_tool])) - graph.add_conditional_edges( - "llm", - tools_condition, # Routes to "tools" or "__end__" - {"tools": "tools", "__end__": "__end__"}, - ) - ``` - - Custom messages key: - - ```python - def custom_condition(state): - return tools_condition(state, messages_key="chat_history") - ``` - - Note: - This function is designed to work seamlessly with ToolNode and standard - LangGraph patterns. It expects the last message to be an AIMessage when - tool calls are present, which is the standard output format for tool-calling - language models. - """ - if isinstance(state, list): - ai_message = state[-1] - elif (isinstance(state, dict) and (messages := state.get(messages_key, []))) or ( - messages := getattr(state, messages_key, []) - ): - ai_message = messages[-1] - else: - msg = f"No messages found in input state to tool_edge: {state}" - raise ValueError(msg) - if hasattr(ai_message, "tool_calls") and len(ai_message.tool_calls) > 0: - return "tools" - return "__end__" - - -class InjectedState(InjectedToolArg): - """Annotation for injecting graph state into tool arguments. - - This annotation enables tools to access graph state without exposing state - management details to the language model. Tools annotated with InjectedState - receive state data automatically during execution while remaining invisible - to the model's tool-calling interface. - - Args: - field: Optional key to extract from the state dictionary. If `None`, the entire - state is injected. If specified, only that field's value is injected. - This allows tools to request specific state components rather than - processing the full state structure. 
- - Example: - ```python - from typing import List - from typing_extensions import Annotated, TypedDict - - from langchain_core.messages import BaseMessage, AIMessage - from langchain.tools import InjectedState, ToolNode, tool - - - class AgentState(TypedDict): - messages: List[BaseMessage] - foo: str - - - @tool - def state_tool(x: int, state: Annotated[dict, InjectedState]) -> str: - '''Do something with state.''' - if len(state["messages"]) > 2: - return state["foo"] + str(x) - else: - return "not enough messages" - - - @tool - def foo_tool(x: int, foo: Annotated[str, InjectedState("foo")]) -> str: - '''Do something else with state.''' - return foo + str(x + 1) - - - node = ToolNode([state_tool, foo_tool]) - - tool_call1 = {"name": "state_tool", "args": {"x": 1}, "id": "1", "type": "tool_call"} - tool_call2 = {"name": "foo_tool", "args": {"x": 1}, "id": "2", "type": "tool_call"} - state = { - "messages": [AIMessage("", tool_calls=[tool_call1, tool_call2])], - "foo": "bar", - } - node.invoke(state) - ``` - - ```python - [ - ToolMessage(content="not enough messages", name="state_tool", tool_call_id="1"), - ToolMessage(content="bar2", name="foo_tool", tool_call_id="2"), - ] - ``` - - Note: - - InjectedState arguments are automatically excluded from tool schemas - presented to language models - - ToolNode handles the injection process during execution - - Tools can mix regular arguments (controlled by the model) with injected - arguments (controlled by the system) - - State injection occurs after the model generates tool calls but before - tool execution - """ - - def __init__(self, field: str | None = None) -> None: - """Initialize the InjectedState annotation.""" - self.field = field - - -class InjectedStore(InjectedToolArg): - """Annotation for injecting persistent store into tool arguments. - - This annotation enables tools to access LangGraph's persistent storage system - without exposing storage details to the language model. Tools annotated with - InjectedStore receive the store instance automatically during execution while - remaining invisible to the model's tool-calling interface. - - The store provides persistent, cross-session data storage that tools can use - for maintaining context, user preferences, or any other data that needs to - persist beyond individual workflow executions. - - !!! 
warning - `InjectedStore` annotation requires `langchain-core >= 0.3.8` - - Example: - ```python - from typing_extensions import Annotated - from langgraph.store.memory import InMemoryStore - from langchain.tools import InjectedStore, ToolNode, tool - - @tool - def save_preference( - key: str, - value: str, - store: Annotated[Any, InjectedStore()] - ) -> str: - \"\"\"Save user preference to persistent storage.\"\"\" - store.put(("preferences",), key, value) - return f"Saved {key} = {value}" - - @tool - def get_preference( - key: str, - store: Annotated[Any, InjectedStore()] - ) -> str: - \"\"\"Retrieve user preference from persistent storage.\"\"\" - result = store.get(("preferences",), key) - return result.value if result else "Not found" - ``` - - Usage with ToolNode and graph compilation: - - ```python - from langgraph.graph import StateGraph - from langgraph.store.memory import InMemoryStore - - store = InMemoryStore() - tool_node = ToolNode([save_preference, get_preference]) - - graph = StateGraph(State) - graph.add_node("tools", tool_node) - compiled_graph = graph.compile(store=store) # Store is injected automatically - ``` - - Cross-session persistence: - - ```python - # First session - result1 = graph.invoke({"messages": [HumanMessage("Save my favorite color as blue")]}) - - # Later session - data persists - result2 = graph.invoke({"messages": [HumanMessage("What's my favorite color?")]}) - ``` - - Note: - - InjectedStore arguments are automatically excluded from tool schemas - presented to language models - - The store instance is automatically injected by ToolNode during execution - - Tools can access namespaced storage using the store's get/put methods - - Store injection requires the graph to be compiled with a store instance - - Multiple tools can share the same store instance for data consistency - """ - - -def _is_injection(type_arg: Any, injection_type: type[InjectedState | InjectedStore]) -> bool: - """Check if a type argument represents an injection annotation. - - This utility function determines whether a type annotation indicates that - an argument should be injected with state or store data. It handles both - direct annotations and nested annotations within Union or Annotated types. - - Args: - type_arg: The type argument to check for injection annotations. - injection_type: The injection type to look for (InjectedState or InjectedStore). - - Returns: - True if the type argument contains the specified injection annotation. - """ - if isinstance(type_arg, injection_type) or ( - isinstance(type_arg, type) and issubclass(type_arg, injection_type) - ): - return True - origin_ = get_origin(type_arg) - if origin_ is Union or origin_ is Annotated: - return any(_is_injection(ta, injection_type) for ta in get_args(type_arg)) - return False - - -def _get_state_args(tool: BaseTool) -> dict[str, str | None]: - """Extract state injection mappings from tool annotations. - - This function analyzes a tool's input schema to identify arguments that should - be injected with graph state. It processes InjectedState annotations to build - a mapping of tool argument names to state field names. - - Args: - tool: The tool to analyze for state injection requirements. - - Returns: - A dictionary mapping tool argument names to state field names. If a field - name is None, the entire state should be injected for that argument. 
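A brief sketch of what `_get_state_args` is expected to return for the two `InjectedState` styles shown earlier in this file: whole-state injection maps the argument name to `None`, while field injection maps it to the field name. The tools mirror the `InjectedState` docstring; the mappings shown in comments are the expected behavior based on the implementation, not captured output.

```python
from typing import Annotated

from langchain_core.tools import tool

from langchain.tools import InjectedState


@tool
def state_tool(x: int, state: Annotated[dict, InjectedState]) -> str:
    """Use the whole graph state."""
    return f"{x}:{len(state['messages'])}"


@tool
def foo_tool(x: int, foo: Annotated[str, InjectedState("foo")]) -> str:
    """Use only the 'foo' field of the state."""
    return foo + str(x)


# Expected mappings (based on the implementation):
#   _get_state_args(state_tool) -> {"state": None}   # inject the entire state
#   _get_state_args(foo_tool)   -> {"foo": "foo"}    # inject only state["foo"]
```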
- """ - full_schema = tool.get_input_schema() - tool_args_to_state_fields: dict = {} - - for name, type_ in get_all_basemodel_annotations(full_schema).items(): - injections = [ - type_arg for type_arg in get_args(type_) if _is_injection(type_arg, InjectedState) - ] - if len(injections) > 1: - msg = ( - "A tool argument should not be annotated with InjectedState more than " - f"once. Received arg {name} with annotations {injections}." - ) - raise ValueError(msg) - if len(injections) == 1: - injection = injections[0] - if isinstance(injection, InjectedState) and injection.field: - tool_args_to_state_fields[name] = injection.field - else: - tool_args_to_state_fields[name] = None - else: - pass - return tool_args_to_state_fields - - -def _get_store_arg(tool: BaseTool) -> str | None: - """Extract store injection argument from tool annotations. - - This function analyzes a tool's input schema to identify the argument that - should be injected with the graph store. Only one store argument is supported - per tool. - - Args: - tool: The tool to analyze for store injection requirements. - - Returns: - The name of the argument that should receive the store injection, or None - if no store injection is required. - - Raises: - ValueError: If a tool argument has multiple InjectedStore annotations. - """ - full_schema = tool.get_input_schema() - for name, type_ in get_all_basemodel_annotations(full_schema).items(): - injections = [ - type_arg for type_arg in get_args(type_) if _is_injection(type_arg, InjectedStore) - ] - if len(injections) > 1: - msg = ( - "A tool argument should not be annotated with InjectedStore more than " - f"once. Received arg {name} with annotations {injections}." - ) - raise ValueError(msg) - if len(injections) == 1: - return name - - return None diff --git a/libs/langchain_v1/pyproject.toml b/libs/langchain_v1/pyproject.toml index a4a2ee01a91..3366a428f90 100644 --- a/libs/langchain_v1/pyproject.toml +++ b/libs/langchain_v1/pyproject.toml @@ -3,25 +3,26 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain" +description = "Building applications with LLMs through composability" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.4" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", - "langgraph>=1.0.0a4,<2.0.0", + "langchain-core>=1.0.2,<2.0.0", + "langgraph>=1.0.2,<1.1.0", "pydantic>=2.7.4,<3.0.0", ] -name = "langchain" -version = "1.0.0a12" -description = "Building applications with LLMs through composability" -readme = "README.md" - [project.optional-dependencies] +model-profiles = ["langchain-model-profiles"] community = ["langchain-community"] anthropic = ["langchain-anthropic"] openai = ["langchain-openai"] -#azure-ai = ["langchain-azure-ai"] +azure-ai = ["langchain-azure-ai"] #cohere = ["langchain-cohere"] google-vertexai = ["langchain-google-vertexai"] google-genai = ["langchain-google-genai"] @@ -29,7 +30,7 @@ fireworks = ["langchain-fireworks"] ollama = ["langchain-ollama"] together = ["langchain-together"] mistralai = ["langchain-mistralai"] -#huggingface = ["langchain-huggingface"] +huggingface = ["langchain-huggingface"] groq = ["langchain-groq"] aws = ["langchain-aws"] deepseek = ["langchain-deepseek"] @@ -37,12 +38,13 @@ xai = ["langchain-xai"] perplexity = ["langchain-perplexity"] [project.urls] -homepage = "https://docs.langchain.com/" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain" -changelog = 
"https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain%3D%3D1%22" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/" +Documentation = "https://reference.langchain.com/python/langchain/langchain/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain%3D%3D1%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ @@ -56,8 +58,7 @@ test = [ "syrupy>=4.0.2,<5.0.0", "toml>=0.10.2,<1.0.0", "langchain-tests", - "langchain-text-splitters", - "langchain-openai" + "langchain-openai", ] lint = [ "ruff>=0.12.2,<0.13.0", @@ -115,6 +116,7 @@ ignore = [ "PLC0415", # Imports should be at the top. Not always desirable "PLR0913", # Too many arguments in function definition "PLC0414", # Inconsistent with how type checkers expect to be notified of intentional re-exports + "RUF002", # Em-dash in docstring ] unfixable = ["B028"] # People should intentionally tune the stacklevel diff --git a/libs/langchain_v1/tests/integration_tests/agents/middleware/__init__.py b/libs/langchain_v1/tests/integration_tests/agents/middleware/__init__.py new file mode 100644 index 00000000000..df702936a4a --- /dev/null +++ b/libs/langchain_v1/tests/integration_tests/agents/middleware/__init__.py @@ -0,0 +1 @@ +"""Integration tests for agent middleware.""" diff --git a/libs/langchain_v1/tests/integration_tests/agents/middleware/test_shell_tool_integration.py b/libs/langchain_v1/tests/integration_tests/agents/middleware/test_shell_tool_integration.py new file mode 100644 index 00000000000..82b3e1b2588 --- /dev/null +++ b/libs/langchain_v1/tests/integration_tests/agents/middleware/test_shell_tool_integration.py @@ -0,0 +1,146 @@ +"""Integration tests for ShellToolMiddleware with create_agent.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +import pytest +from langchain_core.messages import HumanMessage + +from langchain.agents import create_agent +from langchain.agents.middleware.shell_tool import ShellToolMiddleware + + +def _get_model(provider: str) -> Any: + """Get chat model for the specified provider.""" + if provider == "anthropic": + from langchain_anthropic import ChatAnthropic + + return ChatAnthropic(model="claude-sonnet-4-5-20250929") + elif provider == "openai": + from langchain_openai import ChatOpenAI + + return ChatOpenAI(model="gpt-4o-mini") + else: + msg = f"Unknown provider: {provider}" + raise ValueError(msg) + + +@pytest.mark.parametrize("provider", ["anthropic", "openai"]) +def test_shell_tool_basic_execution(tmp_path: Path, provider: str) -> None: + """Test basic shell command execution across different models.""" + pytest.importorskip(f"langchain_{provider}") + + workspace = tmp_path / "workspace" + agent = create_agent( + model=_get_model(provider), + middleware=[ShellToolMiddleware(workspace_root=workspace)], + ) + + result = agent.invoke( + {"messages": [HumanMessage("Run the command 'echo hello' and tell me what it outputs")]} + ) + + tool_messages = [msg for msg in result["messages"] if msg.type == "tool"] + assert len(tool_messages) > 0, "Shell tool should have been called" + + tool_outputs = [msg.content for msg in tool_messages] + assert any("hello" in output.lower() for output in 
tool_outputs), ( + "Shell output should contain 'hello'" + ) + + +@pytest.mark.requires("langchain_anthropic") +def test_shell_session_persistence(tmp_path: Path) -> None: + """Test shell session state persists across multiple tool calls.""" + workspace = tmp_path / "workspace" + agent = create_agent( + model=_get_model("anthropic"), + middleware=[ShellToolMiddleware(workspace_root=workspace)], + ) + + result = agent.invoke( + { + "messages": [ + HumanMessage( + "First run 'export TEST_VAR=hello'. " + "Then run 'echo $TEST_VAR' to verify it persists." + ) + ] + } + ) + + tool_messages = [msg for msg in result["messages"] if msg.type == "tool"] + assert len(tool_messages) >= 2, "Shell tool should be called multiple times" + + tool_outputs = [msg.content for msg in tool_messages] + assert any("hello" in output for output in tool_outputs), "Environment variable should persist" + + +@pytest.mark.requires("langchain_anthropic") +def test_shell_tool_error_handling(tmp_path: Path) -> None: + """Test shell tool captures command errors.""" + workspace = tmp_path / "workspace" + agent = create_agent( + model=_get_model("anthropic"), + middleware=[ShellToolMiddleware(workspace_root=workspace)], + ) + + result = agent.invoke( + { + "messages": [ + HumanMessage( + "Run the command 'ls /nonexistent_directory_12345' and show me the result" + ) + ] + } + ) + + tool_messages = [msg for msg in result["messages"] if msg.type == "tool"] + assert len(tool_messages) > 0, "Shell tool should have been called" + + tool_outputs = " ".join(msg.content for msg in tool_messages) + assert ( + "no such file" in tool_outputs.lower() + or "cannot access" in tool_outputs.lower() + or "not found" in tool_outputs.lower() + or "exit code" in tool_outputs.lower() + ), "Error should be captured in tool output" + + +@pytest.mark.requires("langchain_anthropic") +def test_shell_tool_with_custom_tools(tmp_path: Path) -> None: + """Test shell tool works alongside custom tools.""" + from langchain_core.tools import tool + + workspace = tmp_path / "workspace" + + @tool + def custom_greeting(name: str) -> str: + """Greet someone by name.""" + return f"Hello, {name}!" + + agent = create_agent( + model=_get_model("anthropic"), + tools=[custom_greeting], + middleware=[ShellToolMiddleware(workspace_root=workspace)], + ) + + result = agent.invoke( + { + "messages": [ + HumanMessage( + "First, use the custom_greeting tool to greet 'Alice'. " + "Then run the shell command 'echo world'." + ) + ] + } + ) + + tool_messages = [msg for msg in result["messages"] if msg.type == "tool"] + assert len(tool_messages) >= 2, "Both tools should have been called" + + tool_outputs = " ".join(msg.content for msg in tool_messages) + assert "Alice" in tool_outputs, "Custom tool should be used" + assert "world" in tool_outputs, "Shell tool should be used" diff --git a/libs/langchain_v1/tests/integration_tests/agents/test_response_format.py b/libs/langchain_v1/tests/integration_tests/agents/test_response_format.py index 2c8a5a401c4..db3edf6dcf6 100644 --- a/libs/langchain_v1/tests/integration_tests/agents/test_response_format.py +++ b/libs/langchain_v1/tests/integration_tests/agents/test_response_format.py @@ -26,7 +26,7 @@ def test_inference_to_native_output() -> None: model = ChatOpenAI(model="gpt-5") agent = create_agent( model, - prompt=( + system_prompt=( "You are a helpful weather assistant. Please call the get_weather tool, " "then use the WeatherReport tool to generate the final response." 
), @@ -56,7 +56,7 @@ def test_inference_to_tool_output() -> None: model = ChatOpenAI(model="gpt-4") agent = create_agent( model, - prompt=( + system_prompt=( "You are a helpful weather assistant. Please call the get_weather tool, " "then use the WeatherReport tool to generate the final response." ), diff --git a/libs/langchain_v1/tests/integration_tests/chat_models/test_base.py b/libs/langchain_v1/tests/integration_tests/chat_models/test_base.py index c2ebc5638ea..ad0a90fa1da 100644 --- a/libs/langchain_v1/tests/integration_tests/chat_models/test_base.py +++ b/libs/langchain_v1/tests/integration_tests/chat_models/test_base.py @@ -25,7 +25,7 @@ async def test_init_chat_model_chain() -> None: model_with_config = model_with_tools.with_config( RunnableConfig(tags=["foo"]), - configurable={"bar_model": "claude-3-7-sonnet-20250219"}, + configurable={"bar_model": "claude-sonnet-4-5-20250929"}, ) prompt = ChatPromptTemplate.from_messages([("system", "foo"), ("human", "{input}")]) chain = prompt | model_with_config diff --git a/libs/langchain_v1/tests/unit_tests/agents/__snapshots__/test_middleware_agent.ambr b/libs/langchain_v1/tests/unit_tests/agents/__snapshots__/test_middleware_agent.ambr index 6f1e17badcc..75be381108b 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/__snapshots__/test_middleware_agent.ambr +++ b/libs/langchain_v1/tests/unit_tests/agents/__snapshots__/test_middleware_agent.ambr @@ -20,7 +20,6 @@ __start__ --> NoopZero\2ebefore_agent; model -.-> NoopTwo\2eafter_agent; model -.-> tools; - tools -.-> NoopTwo\2eafter_agent; tools -.-> model; NoopOne\2eafter_agent --> __end__; classDef default fill:#f2f0ff,line-height:1.2 @@ -343,7 +342,6 @@ __start__ --> NoopSeven\2ebefore_model; model --> NoopEight\2eafter_model; tools -.-> NoopSeven\2ebefore_model; - tools -.-> __end__; classDef default fill:#f2f0ff,line-height:1.2 classDef first fill-opacity:0 classDef last fill:#bfb6fc @@ -376,7 +374,6 @@ __start__ --> NoopSeven\2ebefore_model; model --> NoopEight\2eafter_model; tools -.-> NoopSeven\2ebefore_model; - tools -.-> __end__; classDef default fill:#f2f0ff,line-height:1.2 classDef first fill-opacity:0 classDef last fill:#bfb6fc @@ -409,7 +406,6 @@ __start__ --> NoopSeven\2ebefore_model; model --> NoopEight\2eafter_model; tools -.-> NoopSeven\2ebefore_model; - tools -.-> __end__; classDef default fill:#f2f0ff,line-height:1.2 classDef first fill-opacity:0 classDef last fill:#bfb6fc @@ -442,7 +438,6 @@ __start__ --> NoopSeven\2ebefore_model; model --> NoopEight\2eafter_model; tools -.-> NoopSeven\2ebefore_model; - tools -.-> __end__; classDef default fill:#f2f0ff,line-height:1.2 classDef first fill-opacity:0 classDef last fill:#bfb6fc @@ -475,7 +470,6 @@ __start__ --> NoopSeven\2ebefore_model; model --> NoopEight\2eafter_model; tools -.-> NoopSeven\2ebefore_model; - tools -.-> __end__; classDef default fill:#f2f0ff,line-height:1.2 classDef first fill-opacity:0 classDef last fill:#bfb6fc @@ -497,7 +491,6 @@ __start__ --> model; model -.-> __end__; model -.-> tools; - tools -.-> __end__; tools -.-> model; classDef default fill:#f2f0ff,line-height:1.2 classDef first fill-opacity:0 diff --git a/libs/langchain_v1/tests/unit_tests/agents/__snapshots__/test_return_direct_graph.ambr b/libs/langchain_v1/tests/unit_tests/agents/__snapshots__/test_return_direct_graph.ambr new file mode 100644 index 00000000000..f3e223f8df6 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/__snapshots__/test_return_direct_graph.ambr @@ -0,0 +1,69 @@ +# serializer version: 1 +# name: 
test_agent_graph_with_mixed_tools
+  '''
+  ---
+  config:
+    flowchart:
+      curve: linear
+  ---
+  graph TD;
+    __start__([<p>__start__</p>]):::first
+    model(model)
+    tools(tools)
+    __end__([<p>__end__</p>]):::last
+    __start__ --> model;
+    model -.-> __end__;
+    model -.-> tools;
+    tools -.-> __end__;
+    tools -.-> model;
+    classDef default fill:#f2f0ff,line-height:1.2
+    classDef first fill-opacity:0
+    classDef last fill:#bfb6fc
+
+  '''
+# ---
+# name: test_agent_graph_with_return_direct_tool
+  '''
+  ---
+  config:
+    flowchart:
+      curve: linear
+  ---
+  graph TD;
+    __start__([<p>__start__</p>]):::first
+    model(model)
+    tools(tools)
+    __end__([<p>__end__</p>]):::last
+    __start__ --> model;
+    model -.-> __end__;
+    model -.-> tools;
+    tools -.-> __end__;
+    tools -.-> model;
+    classDef default fill:#f2f0ff,line-height:1.2
+    classDef first fill-opacity:0
+    classDef last fill:#bfb6fc
+
+  '''
+# ---
+# name: test_agent_graph_without_return_direct_tools
+  '''
+  ---
+  config:
+    flowchart:
+      curve: linear
+  ---
+  graph TD;
+    __start__([<p>__start__</p>]):::first
+    model(model)
+    tools(tools)
+    __end__([<p>__end__</p>
]):::last + __start__ --> model; + model -.-> __end__; + model -.-> tools; + tools -.-> model; + classDef default fill:#f2f0ff,line-height:1.2 + classDef first fill-opacity:0 + classDef last fill:#bfb6fc + + ''' +# --- diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_file_search.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_file_search.py new file mode 100644 index 00000000000..40aeedd71bd --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_file_search.py @@ -0,0 +1,261 @@ +"""Unit tests for file search middleware.""" + +from pathlib import Path +from typing import Any + +import pytest + +from langchain.agents.middleware.file_search import ( + FilesystemFileSearchMiddleware, +) + + +class TestFilesystemGrepSearch: + """Tests for filesystem-backed grep search.""" + + def test_grep_invalid_include_pattern(self, tmp_path: Path) -> None: + """Return error when include glob cannot be parsed.""" + + (tmp_path / "example.py").write_text("print('hello')\n", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=False) + + result = middleware.grep_search.func(pattern="print", include="*.{py") + + assert result == "Invalid include pattern" + + def test_ripgrep_command_uses_literal_pattern( + self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Ensure ripgrep receives pattern after ``--`` to avoid option parsing.""" + + (tmp_path / "example.py").write_text("print('hello')\n", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=True) + + captured: dict[str, list[str]] = {} + + class DummyResult: + stdout = "" + + def fake_run(*args: Any, **kwargs: Any) -> DummyResult: + cmd = args[0] + captured["cmd"] = cmd + return DummyResult() + + monkeypatch.setattr("langchain.agents.middleware.file_search.subprocess.run", fake_run) + + middleware._ripgrep_search("--pattern", "/", None) + + assert "cmd" in captured + cmd = captured["cmd"] + assert cmd[:2] == ["rg", "--json"] + assert "--" in cmd + separator_index = cmd.index("--") + assert cmd[separator_index + 1] == "--pattern" + + def test_grep_basic_search_python_fallback(self, tmp_path: Path) -> None: + """Test basic grep search using Python fallback.""" + (tmp_path / "file1.py").write_text("def hello():\n pass\n", encoding="utf-8") + (tmp_path / "file2.py").write_text("def world():\n pass\n", encoding="utf-8") + (tmp_path / "file3.txt").write_text("hello world\n", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=False) + + result = middleware.grep_search.func(pattern="hello") + + assert "/file1.py" in result + assert "/file3.txt" in result + assert "/file2.py" not in result + + def test_grep_with_include_filter(self, tmp_path: Path) -> None: + """Test grep search with include pattern filter.""" + (tmp_path / "file1.py").write_text("def hello():\n pass\n", encoding="utf-8") + (tmp_path / "file2.txt").write_text("hello world\n", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=False) + + result = middleware.grep_search.func(pattern="hello", include="*.py") + + assert "/file1.py" in result + assert "/file2.txt" not in result + + def test_grep_output_mode_content(self, tmp_path: Path) -> None: + """Test grep search with content output mode.""" + (tmp_path / "test.py").write_text("line1\nhello\nline3\n", encoding="utf-8") + + middleware = 
FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=False) + + result = middleware.grep_search.func(pattern="hello", output_mode="content") + + assert "/test.py:2:hello" in result + + def test_grep_output_mode_count(self, tmp_path: Path) -> None: + """Test grep search with count output mode.""" + (tmp_path / "test.py").write_text("hello\nhello\nworld\n", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=False) + + result = middleware.grep_search.func(pattern="hello", output_mode="count") + + assert "/test.py:2" in result + + def test_grep_invalid_regex_pattern(self, tmp_path: Path) -> None: + """Test grep search with invalid regex pattern.""" + (tmp_path / "test.py").write_text("hello\n", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=False) + + result = middleware.grep_search.func(pattern="[invalid") + + assert "Invalid regex pattern" in result + + def test_grep_no_matches(self, tmp_path: Path) -> None: + """Test grep search with no matches.""" + (tmp_path / "test.py").write_text("hello\n", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path), use_ripgrep=False) + + result = middleware.grep_search.func(pattern="notfound") + + assert result == "No matches found" + + +class TestFilesystemGlobSearch: + """Tests for filesystem-backed glob search.""" + + def test_glob_basic_pattern(self, tmp_path: Path) -> None: + """Test basic glob pattern matching.""" + (tmp_path / "file1.py").write_text("content", encoding="utf-8") + (tmp_path / "file2.py").write_text("content", encoding="utf-8") + (tmp_path / "file3.txt").write_text("content", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path)) + + result = middleware.glob_search.func(pattern="*.py") + + assert "/file1.py" in result + assert "/file2.py" in result + assert "/file3.txt" not in result + + def test_glob_recursive_pattern(self, tmp_path: Path) -> None: + """Test recursive glob pattern matching.""" + (tmp_path / "src").mkdir() + (tmp_path / "src" / "test.py").write_text("content", encoding="utf-8") + (tmp_path / "src" / "nested").mkdir() + (tmp_path / "src" / "nested" / "deep.py").write_text("content", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path)) + + result = middleware.glob_search.func(pattern="**/*.py") + + assert "/src/test.py" in result + assert "/src/nested/deep.py" in result + + def test_glob_with_subdirectory_path(self, tmp_path: Path) -> None: + """Test glob search starting from subdirectory.""" + (tmp_path / "src").mkdir() + (tmp_path / "src" / "file1.py").write_text("content", encoding="utf-8") + (tmp_path / "other").mkdir() + (tmp_path / "other" / "file2.py").write_text("content", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path)) + + result = middleware.glob_search.func(pattern="*.py", path="/src") + + assert "/src/file1.py" in result + assert "/other/file2.py" not in result + + def test_glob_no_matches(self, tmp_path: Path) -> None: + """Test glob search with no matches.""" + (tmp_path / "file.txt").write_text("content", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path)) + + result = middleware.glob_search.func(pattern="*.py") + + assert result == "No files found" + + def test_glob_invalid_path(self, tmp_path: Path) -> None: + """Test glob search with non-existent path.""" + middleware = 
FilesystemFileSearchMiddleware(root_path=str(tmp_path)) + + result = middleware.glob_search.func(pattern="*.py", path="/nonexistent") + + assert result == "No files found" + + +class TestPathTraversalSecurity: + """Security tests for path traversal protection.""" + + def test_path_traversal_with_double_dots(self, tmp_path: Path) -> None: + """Test that path traversal with .. is blocked.""" + (tmp_path / "allowed").mkdir() + (tmp_path / "allowed" / "file.txt").write_text("content", encoding="utf-8") + + # Create file outside root + parent = tmp_path.parent + (parent / "secret.txt").write_text("secret", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path / "allowed")) + + # Try to escape with .. + result = middleware.glob_search.func(pattern="*.txt", path="/../") + + assert result == "No files found" + assert "secret" not in result + + def test_path_traversal_with_absolute_path(self, tmp_path: Path) -> None: + """Test that absolute paths outside root are blocked.""" + (tmp_path / "allowed").mkdir() + + # Create file outside root + (tmp_path / "secret.txt").write_text("secret", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path / "allowed")) + + # Try to access with absolute path + result = middleware.glob_search.func(pattern="*.txt", path=str(tmp_path)) + + assert result == "No files found" + + def test_path_traversal_with_symlink(self, tmp_path: Path) -> None: + """Test that symlinks outside root are blocked.""" + (tmp_path / "allowed").mkdir() + (tmp_path / "secret.txt").write_text("secret", encoding="utf-8") + + # Create symlink from allowed dir to parent + try: + (tmp_path / "allowed" / "link").symlink_to(tmp_path) + except OSError: + pytest.skip("Symlink creation not supported") + + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path / "allowed")) + + # Try to access via symlink + result = middleware.glob_search.func(pattern="*.txt", path="/link") + + assert result == "No files found" + + def test_validate_path_blocks_tilde(self, tmp_path: Path) -> None: + """Test that tilde paths are handled safely.""" + middleware = FilesystemFileSearchMiddleware(root_path=str(tmp_path)) + + result = middleware.glob_search.func(pattern="*.txt", path="~/") + + assert result == "No files found" + + def test_grep_path_traversal_protection(self, tmp_path: Path) -> None: + """Test that grep also protects against path traversal.""" + (tmp_path / "allowed").mkdir() + (tmp_path / "secret.txt").write_text("secret content", encoding="utf-8") + + middleware = FilesystemFileSearchMiddleware( + root_path=str(tmp_path / "allowed"), use_ripgrep=False + ) + + # Try to search outside root + result = middleware.grep_search.func(pattern="secret", path="/../") + + assert result == "No matches found" + assert "secret" not in result diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_override_methods.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_override_methods.py new file mode 100644 index 00000000000..8005b40fdbd --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_override_methods.py @@ -0,0 +1,381 @@ +"""Unit tests for override() methods on ModelRequest and ToolCallRequest.""" + +import pytest +from langchain_core.language_models.fake_chat_models import GenericFakeChatModel +from langchain_core.messages import AIMessage, HumanMessage +from langchain_core.tools import BaseTool + +from langchain.agents.middleware.types import ModelRequest +from 
langchain.agents.middleware.types import ToolCallRequest + + +class TestModelRequestOverride: + """Test the ModelRequest.override() method.""" + + def test_override_single_attribute(self) -> None: + """Test overriding a single attribute.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="Hello")])) + original_request = ModelRequest( + model=model, + system_prompt="Original prompt", + messages=[HumanMessage("Hi")], + tool_choice=None, + tools=[], + response_format=None, + state={}, + runtime=None, + ) + + new_request = original_request.override(system_prompt="New prompt") + + # New request should have the overridden value + assert new_request.system_prompt == "New prompt" + # Original request should be unchanged (immutability) + assert original_request.system_prompt == "Original prompt" + # Other attributes should be the same + assert new_request.model == original_request.model + assert new_request.messages == original_request.messages + + def test_override_multiple_attributes(self) -> None: + """Test overriding multiple attributes at once.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="Hello")])) + original_request = ModelRequest( + model=model, + system_prompt="Original prompt", + messages=[HumanMessage("Hi")], + tool_choice=None, + tools=[], + response_format=None, + state={"count": 1}, + runtime=None, + ) + + new_request = original_request.override( + system_prompt="New prompt", + tool_choice="auto", + state={"count": 2}, + ) + + # Overridden values should be changed + assert new_request.system_prompt == "New prompt" + assert new_request.tool_choice == "auto" + assert new_request.state == {"count": 2} + # Original should be unchanged + assert original_request.system_prompt == "Original prompt" + assert original_request.tool_choice is None + assert original_request.state == {"count": 1} + + def test_override_messages(self) -> None: + """Test overriding messages list.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="Hello")])) + original_messages = [HumanMessage("Hi")] + new_messages = [HumanMessage("Hello"), AIMessage("Hi there")] + + original_request = ModelRequest( + model=model, + system_prompt=None, + messages=original_messages, + tool_choice=None, + tools=[], + response_format=None, + state={}, + runtime=None, + ) + + new_request = original_request.override(messages=new_messages) + + assert new_request.messages == new_messages + assert original_request.messages == original_messages + assert len(new_request.messages) == 2 + assert len(original_request.messages) == 1 + + def test_override_model_settings(self) -> None: + """Test overriding model_settings dict.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="Hello")])) + original_request = ModelRequest( + model=model, + system_prompt=None, + messages=[HumanMessage("Hi")], + tool_choice=None, + tools=[], + response_format=None, + state={}, + runtime=None, + model_settings={"temperature": 0.5}, + ) + + new_request = original_request.override( + model_settings={"temperature": 0.9, "max_tokens": 100} + ) + + assert new_request.model_settings == {"temperature": 0.9, "max_tokens": 100} + assert original_request.model_settings == {"temperature": 0.5} + + def test_override_with_none_value(self) -> None: + """Test overriding with None value.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="Hello")])) + original_request = ModelRequest( + model=model, + system_prompt="Original prompt", + messages=[HumanMessage("Hi")], + tool_choice="auto", + tools=[], + 
response_format=None, + state={}, + runtime=None, + ) + + new_request = original_request.override( + system_prompt=None, + tool_choice=None, + ) + + assert new_request.system_prompt is None + assert new_request.tool_choice is None + assert original_request.system_prompt == "Original prompt" + assert original_request.tool_choice == "auto" + + def test_override_preserves_identity_of_unchanged_objects(self) -> None: + """Test that unchanged attributes maintain object identity.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="Hello")])) + messages = [HumanMessage("Hi")] + state = {"key": "value"} + + original_request = ModelRequest( + model=model, + system_prompt="Original prompt", + messages=messages, + tool_choice=None, + tools=[], + response_format=None, + state=state, + runtime=None, + ) + + new_request = original_request.override(system_prompt="New prompt") + + # Unchanged objects should be the same instance + assert new_request.messages is messages + assert new_request.state is state + assert new_request.model is model + + def test_override_chaining(self) -> None: + """Test chaining multiple override calls.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="Hello")])) + original_request = ModelRequest( + model=model, + system_prompt="Prompt 1", + messages=[HumanMessage("Hi")], + tool_choice=None, + tools=[], + response_format=None, + state={"count": 1}, + runtime=None, + ) + + final_request = ( + original_request.override(system_prompt="Prompt 2") + .override(state={"count": 2}) + .override(tool_choice="auto") + ) + + assert final_request.system_prompt == "Prompt 2" + assert final_request.state == {"count": 2} + assert final_request.tool_choice == "auto" + # Original should be unchanged + assert original_request.system_prompt == "Prompt 1" + assert original_request.state == {"count": 1} + assert original_request.tool_choice is None + + +class TestToolCallRequestOverride: + """Test the ToolCallRequest.override() method.""" + + def test_override_tool_call(self) -> None: + """Test overriding tool_call dict.""" + from langchain_core.tools import tool + + @tool + def test_tool(x: int) -> str: + """A test tool.""" + return f"Result: {x}" + + original_call = {"name": "test_tool", "args": {"x": 5}, "id": "1", "type": "tool_call"} + modified_call = {"name": "test_tool", "args": {"x": 10}, "id": "1", "type": "tool_call"} + + original_request = ToolCallRequest( + tool_call=original_call, + tool=test_tool, + state={"messages": []}, + runtime=None, + ) + + new_request = original_request.override(tool_call=modified_call) + + # New request should have modified tool_call + assert new_request.tool_call["args"]["x"] == 10 + # Original should be unchanged + assert original_request.tool_call["args"]["x"] == 5 + # Other attributes should be the same + assert new_request.tool is original_request.tool + assert new_request.state is original_request.state + + def test_override_state(self) -> None: + """Test overriding state.""" + from langchain_core.tools import tool + + @tool + def test_tool(x: int) -> str: + """A test tool.""" + return f"Result: {x}" + + tool_call = {"name": "test_tool", "args": {"x": 5}, "id": "1", "type": "tool_call"} + original_state = {"messages": [HumanMessage("Hi")]} + new_state = {"messages": [HumanMessage("Hi"), AIMessage("Hello")]} + + original_request = ToolCallRequest( + tool_call=tool_call, + tool=test_tool, + state=original_state, + runtime=None, + ) + + new_request = original_request.override(state=new_state) + + assert 
len(new_request.state["messages"]) == 2 + assert len(original_request.state["messages"]) == 1 + + def test_override_multiple_attributes(self) -> None: + """Test overriding multiple attributes at once.""" + from langchain_core.tools import tool + + @tool + def test_tool(x: int) -> str: + """A test tool.""" + return f"Result: {x}" + + @tool + def another_tool(y: str) -> str: + """Another test tool.""" + return f"Output: {y}" + + original_call = {"name": "test_tool", "args": {"x": 5}, "id": "1", "type": "tool_call"} + modified_call = { + "name": "another_tool", + "args": {"y": "hello"}, + "id": "2", + "type": "tool_call", + } + + original_request = ToolCallRequest( + tool_call=original_call, + tool=test_tool, + state={"count": 1}, + runtime=None, + ) + + new_request = original_request.override( + tool_call=modified_call, + tool=another_tool, + state={"count": 2}, + ) + + assert new_request.tool_call["name"] == "another_tool" + assert new_request.tool.name == "another_tool" + assert new_request.state == {"count": 2} + # Original unchanged + assert original_request.tool_call["name"] == "test_tool" + assert original_request.tool.name == "test_tool" + assert original_request.state == {"count": 1} + + def test_override_with_copy_pattern(self) -> None: + """Test common pattern of copying and modifying tool_call.""" + from langchain_core.tools import tool + + @tool + def test_tool(value: int) -> str: + """A test tool.""" + return f"Result: {value}" + + original_call = { + "name": "test_tool", + "args": {"value": 5}, + "id": "call_123", + "type": "tool_call", + } + + original_request = ToolCallRequest( + tool_call=original_call, + tool=test_tool, + state={}, + runtime=None, + ) + + # Common pattern: copy tool_call and modify args + modified_call = {**original_request.tool_call, "args": {"value": 10}} + new_request = original_request.override(tool_call=modified_call) + + assert new_request.tool_call["args"]["value"] == 10 + assert new_request.tool_call["id"] == "call_123" + assert new_request.tool_call["name"] == "test_tool" + # Original unchanged + assert original_request.tool_call["args"]["value"] == 5 + + def test_override_preserves_identity(self) -> None: + """Test that unchanged attributes maintain object identity.""" + from langchain_core.tools import tool + + @tool + def test_tool(x: int) -> str: + """A test tool.""" + return f"Result: {x}" + + tool_call = {"name": "test_tool", "args": {"x": 5}, "id": "1", "type": "tool_call"} + state = {"messages": []} + + original_request = ToolCallRequest( + tool_call=tool_call, + tool=test_tool, + state=state, + runtime=None, + ) + + new_call = {"name": "test_tool", "args": {"x": 10}, "id": "1", "type": "tool_call"} + new_request = original_request.override(tool_call=new_call) + + # Unchanged objects should be the same instance + assert new_request.tool is test_tool + assert new_request.state is state + + def test_override_chaining(self) -> None: + """Test chaining multiple override calls.""" + from langchain_core.tools import tool + + @tool + def test_tool(x: int) -> str: + """A test tool.""" + return f"Result: {x}" + + tool_call = {"name": "test_tool", "args": {"x": 5}, "id": "1", "type": "tool_call"} + + original_request = ToolCallRequest( + tool_call=tool_call, + tool=test_tool, + state={"count": 1}, + runtime=None, + ) + + call_2 = {"name": "test_tool", "args": {"x": 10}, "id": "1", "type": "tool_call"} + call_3 = {"name": "test_tool", "args": {"x": 15}, "id": "1", "type": "tool_call"} + + final_request = ( + original_request.override(tool_call=call_2) 
+ .override(state={"count": 2}) + .override(tool_call=call_3) + ) + + assert final_request.tool_call["args"]["x"] == 15 + assert final_request.state == {"count": 2} + # Original unchanged + assert original_request.tool_call["args"]["x"] == 5 + assert original_request.state == {"count": 1} diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_shell_execution_policies.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_shell_execution_policies.py new file mode 100644 index 00000000000..237e69f5fea --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_shell_execution_policies.py @@ -0,0 +1,404 @@ +from __future__ import annotations + +import os +import shutil +from pathlib import Path +from typing import Any + +import pytest + +from langchain.agents.middleware.shell_tool import ( + HostExecutionPolicy, + CodexSandboxExecutionPolicy, + DockerExecutionPolicy, +) + +from langchain.agents.middleware import _execution + + +def _make_resource( + *, + with_prlimit: bool, + has_rlimit_as: bool = True, +) -> Any: + """Create a fake ``resource`` module for testing.""" + + class _BaseResource: + RLIMIT_CPU = 0 + RLIMIT_DATA = 2 + + if has_rlimit_as: + RLIMIT_AS = 1 + + def __init__(self) -> None: + self.prlimit_calls: list[tuple[int, int, tuple[int, int]]] = [] + self.setrlimit_calls: list[tuple[int, tuple[int, int]]] = [] + + def setrlimit(self, resource_name: int, limits: tuple[int, int]) -> None: + self.setrlimit_calls.append((resource_name, limits)) + + if with_prlimit: + + class _Resource(_BaseResource): + def prlimit(self, pid: int, resource_name: int, limits: tuple[int, int]) -> None: + self.prlimit_calls.append((pid, resource_name, limits)) + + else: + + class _Resource(_BaseResource): + pass + + return _Resource() + + +def test_host_policy_validations() -> None: + with pytest.raises(ValueError): + HostExecutionPolicy(max_output_lines=0) + + with pytest.raises(ValueError): + HostExecutionPolicy(cpu_time_seconds=0) + + with pytest.raises(ValueError): + HostExecutionPolicy(memory_bytes=-1) + + +def test_host_policy_requires_resource_for_limits(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(_execution, "resource", None, raising=False) + with pytest.raises(RuntimeError): + HostExecutionPolicy(cpu_time_seconds=1) + + +def test_host_policy_applies_prlimit(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None: + fake_resource = _make_resource(with_prlimit=True) + monkeypatch.setattr(_execution, "resource", fake_resource, raising=False) + monkeypatch.setattr(_execution.sys, "platform", "linux") + + recorded: dict[str, Any] = {} + + class DummyProcess: + pid = 1234 + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + recorded["command"] = list(command) + recorded["env"] = dict(env) + recorded["cwd"] = cwd + recorded["preexec_fn"] = preexec_fn + recorded["start_new_session"] = start_new_session + return DummyProcess() + + monkeypatch.setattr(_execution, "_launch_subprocess", fake_launch) + + policy = HostExecutionPolicy(cpu_time_seconds=2, memory_bytes=4096) + env = {"PATH": os.environ.get("PATH", ""), "VAR": "1"} + process = policy.spawn(workspace=tmp_path, env=env, command=("/bin/sh",)) + + assert process is not None + assert recorded["preexec_fn"] is None + assert recorded["start_new_session"] is True + assert fake_resource.prlimit_calls == [ + (1234, fake_resource.RLIMIT_CPU, (2, 2)), + (1234, fake_resource.RLIMIT_AS, (4096, 4096)), + ] + assert fake_resource.setrlimit_calls == [] + + +def 
test_host_policy_uses_preexec_on_macos(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None: + fake_resource = _make_resource(with_prlimit=False) + monkeypatch.setattr(_execution, "resource", fake_resource, raising=False) + monkeypatch.setattr(_execution.sys, "platform", "darwin") + + captured: dict[str, Any] = {} + + class DummyProcess: + pid = 4321 + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + captured["preexec_fn"] = preexec_fn + captured["start_new_session"] = start_new_session + return DummyProcess() + + monkeypatch.setattr(_execution, "_launch_subprocess", fake_launch) + + policy = HostExecutionPolicy(cpu_time_seconds=5, memory_bytes=8192) + env = {"PATH": os.environ.get("PATH", "")} + policy.spawn(workspace=tmp_path, env=env, command=("/bin/sh",)) + + preexec_fn = captured["preexec_fn"] + assert callable(preexec_fn) + assert captured["start_new_session"] is True + + preexec_fn() + # macOS fallback should use setrlimit + assert fake_resource.setrlimit_calls == [ + (fake_resource.RLIMIT_CPU, (5, 5)), + (fake_resource.RLIMIT_AS, (8192, 8192)), + ] + + +def test_host_policy_respects_process_group_flag( + monkeypatch: pytest.MonkeyPatch, tmp_path: Path +) -> None: + fake_resource = _make_resource(with_prlimit=True) + monkeypatch.setattr(_execution, "resource", fake_resource, raising=False) + monkeypatch.setattr(_execution.sys, "platform", "linux") + + recorded: dict[str, Any] = {} + + class DummyProcess: + pid = 1111 + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + recorded["start_new_session"] = start_new_session + return DummyProcess() + + monkeypatch.setattr(_execution, "_launch_subprocess", fake_launch) + + policy = HostExecutionPolicy(create_process_group=False) + env = {"PATH": os.environ.get("PATH", "")} + policy.spawn(workspace=tmp_path, env=env, command=("/bin/sh",)) + + assert recorded["start_new_session"] is False + + +def test_host_policy_falls_back_to_rlimit_data( + monkeypatch: pytest.MonkeyPatch, tmp_path: Path +) -> None: + fake_resource = _make_resource(with_prlimit=True, has_rlimit_as=False) + monkeypatch.setattr(_execution, "resource", fake_resource, raising=False) + monkeypatch.setattr(_execution.sys, "platform", "linux") + + class DummyProcess: + pid = 2222 + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + return DummyProcess() + + monkeypatch.setattr(_execution, "_launch_subprocess", fake_launch) + + policy = HostExecutionPolicy(cpu_time_seconds=7, memory_bytes=2048) + env = {"PATH": os.environ.get("PATH", "")} + policy.spawn(workspace=tmp_path, env=env, command=("/bin/sh",)) + + assert fake_resource.prlimit_calls == [ + (2222, fake_resource.RLIMIT_CPU, (7, 7)), + (2222, fake_resource.RLIMIT_DATA, (2048, 2048)), + ] + + +@pytest.mark.skipif( + shutil.which("codex") is None, + reason="codex CLI not available on PATH", +) +def test_codex_policy_spawns_codex_cli(monkeypatch, tmp_path: Path) -> None: + recorded: dict[str, list[str]] = {} + + class DummyProcess: + pass + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + recorded["command"] = list(command) + assert cwd == tmp_path + assert env["TEST_VAR"] == "1" + assert preexec_fn is None + assert not start_new_session + return DummyProcess() + + monkeypatch.setattr( + "langchain.agents.middleware._execution._launch_subprocess", + fake_launch, + ) + policy = CodexSandboxExecutionPolicy( + platform="linux", + config_overrides={"sandbox_permissions": 
["disk-full-read-access"]}, + ) + + env = {"TEST_VAR": "1"} + policy.spawn(workspace=tmp_path, env=env, command=("/bin/bash",)) + + expected = [ + shutil.which("codex"), + "sandbox", + "linux", + "-c", + 'sandbox_permissions=["disk-full-read-access"]', + "--", + "/bin/bash", + ] + assert recorded["command"] == expected + + +def test_codex_policy_auto_platform_linux(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(_execution.sys, "platform", "linux") + policy = CodexSandboxExecutionPolicy(platform="auto") + assert policy._determine_platform() == "linux" + + +def test_codex_policy_auto_platform_macos(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(_execution.sys, "platform", "darwin") + policy = CodexSandboxExecutionPolicy(platform="auto") + assert policy._determine_platform() == "macos" + + +def test_codex_policy_resolve_missing_binary(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(_execution.shutil, "which", lambda _: None) + policy = CodexSandboxExecutionPolicy(binary="codex") + with pytest.raises(RuntimeError): + policy._resolve_binary() + + +def test_codex_policy_auto_platform_failure(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(_execution.sys, "platform", "win32") + policy = CodexSandboxExecutionPolicy(platform="auto") + with pytest.raises(RuntimeError): + policy._determine_platform() + + +def test_codex_policy_formats_override_values() -> None: + policy = CodexSandboxExecutionPolicy() + assert policy._format_override({"a": 1}) == '{"a": 1}' + + class Custom: + def __str__(self) -> str: + return "custom" + + assert policy._format_override(Custom()) == "custom" + + +def test_codex_policy_sorts_config_overrides(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(_execution.shutil, "which", lambda _: "/usr/bin/codex") + policy = CodexSandboxExecutionPolicy( + config_overrides={"b": 2, "a": 1}, + platform="linux", + ) + command = policy._build_command(("echo",)) + indices = [i for i, part in enumerate(command) if part == "-c"] + override_values = [command[i + 1] for i in indices] + assert override_values == ["a=1", "b=2"] + + +@pytest.mark.skipif( + shutil.which("docker") is None, + reason="docker CLI not available on PATH", +) +def test_docker_policy_spawns_docker_run(monkeypatch, tmp_path: Path) -> None: + recorded: dict[str, list[str]] = {} + + class DummyProcess: + pass + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + recorded["command"] = list(command) + assert cwd == tmp_path + assert "PATH" in env # host environment should retain system PATH + assert not start_new_session + return DummyProcess() + + monkeypatch.setattr( + "langchain.agents.middleware._execution._launch_subprocess", + fake_launch, + ) + policy = DockerExecutionPolicy( + image="ubuntu:22.04", + memory_bytes=4096, + extra_run_args=("--ipc", "host"), + ) + + env = {"PATH": "/bin"} + policy.spawn(workspace=tmp_path, env=env, command=("/bin/bash",)) + + command = recorded["command"] + assert command[0] == shutil.which("docker") + assert command[1:4] == ["run", "-i", "--rm"] + assert "--memory" in command + assert "4096" in command + assert "-v" in command and any(str(tmp_path) in part for part in command) + assert "-w" in command + w_index = command.index("-w") + assert command[w_index + 1] == str(tmp_path) + assert "-e" in command and "PATH=/bin" in command + assert command[-2:] == ["ubuntu:22.04", "/bin/bash"] + + +def test_docker_policy_rejects_cpu_limit() -> None: + with pytest.raises(RuntimeError): 
+ DockerExecutionPolicy(cpu_time_seconds=1) + + +def test_docker_policy_validates_memory() -> None: + with pytest.raises(ValueError): + DockerExecutionPolicy(memory_bytes=0) + + +def test_docker_policy_skips_mount_for_temp_workspace( + monkeypatch: pytest.MonkeyPatch, tmp_path: Path +) -> None: + monkeypatch.setattr(_execution.shutil, "which", lambda _: "/usr/bin/docker") + + recorded: dict[str, list[str]] = {} + + class DummyProcess: + pass + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + recorded["command"] = list(command) + assert cwd == workspace + return DummyProcess() + + monkeypatch.setattr(_execution, "_launch_subprocess", fake_launch) + + workspace = tmp_path / f"{_execution.SHELL_TEMP_PREFIX}case" + workspace.mkdir() + policy = DockerExecutionPolicy(cpus="1.5") + env = {"PATH": "/bin"} + policy.spawn(workspace=workspace, env=env, command=("/bin/sh",)) + + command = recorded["command"] + assert "-v" not in command + assert "-w" in command + w_index = command.index("-w") + assert command[w_index + 1] == "/" + assert "--cpus" in command + assert "--network" in command and "none" in command + assert command[-2:] == [policy.image, "/bin/sh"] + + +def test_docker_policy_validates_cpus() -> None: + with pytest.raises(ValueError): + DockerExecutionPolicy(cpus=" ") + + +def test_docker_policy_validates_user() -> None: + with pytest.raises(ValueError): + DockerExecutionPolicy(user=" ") + + +def test_docker_policy_read_only_and_user(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None: + monkeypatch.setattr(_execution.shutil, "which", lambda _: "/usr/bin/docker") + + recorded: dict[str, list[str]] = {} + + class DummyProcess: + pass + + def fake_launch(command, *, env, cwd, preexec_fn, start_new_session): # noqa: ANN001 + recorded["command"] = list(command) + return DummyProcess() + + monkeypatch.setattr(_execution, "_launch_subprocess", fake_launch) + + workspace = tmp_path + policy = DockerExecutionPolicy(read_only_rootfs=True, user="1000:1000") + policy.spawn(workspace=workspace, env={"PATH": "/bin"}, command=("/bin/sh",)) + + command = recorded["command"] + assert "--read-only" in command + assert "--user" in command + user_index = command.index("--user") + assert command[user_index + 1] == "1000:1000" + + +def test_docker_policy_resolve_missing_binary(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(_execution.shutil, "which", lambda _: None) + policy = DockerExecutionPolicy() + with pytest.raises(RuntimeError): + policy._resolve_binary() diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_shell_tool.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_shell_tool.py new file mode 100644 index 00000000000..37891df9412 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_shell_tool.py @@ -0,0 +1,175 @@ +from __future__ import annotations + +import time +import gc +import tempfile +from pathlib import Path + +import pytest + +from langchain.agents.middleware.shell_tool import ( + HostExecutionPolicy, + ShellToolMiddleware, + _SessionResources, + RedactionRule, +) +from langchain.agents.middleware.types import AgentState + + +def _empty_state() -> AgentState: + return {"messages": []} # type: ignore[return-value] + + +def test_executes_command_and_persists_state(tmp_path: Path) -> None: + workspace = tmp_path / "workspace" + middleware = ShellToolMiddleware(workspace_root=workspace) + try: + state: AgentState = _empty_state() + updates = middleware.before_agent(state, None) + 
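# apply any state updates returned by before_agent before acquiring the shell session resources +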
if updates: + state.update(updates) + resources = middleware._ensure_resources(state) # type: ignore[attr-defined] + + middleware._run_shell_tool(resources, {"command": "cd /"}, tool_call_id=None) + result = middleware._run_shell_tool(resources, {"command": "pwd"}, tool_call_id=None) + assert isinstance(result, str) + assert result.strip() == "/" + echo_result = middleware._run_shell_tool( + resources, {"command": "echo ready"}, tool_call_id=None + ) + assert "ready" in echo_result + finally: + updates = middleware.after_agent(state, None) + if updates: + state.update(updates) + + +def test_restart_resets_session_environment(tmp_path: Path) -> None: + middleware = ShellToolMiddleware(workspace_root=tmp_path / "workspace") + try: + state: AgentState = _empty_state() + updates = middleware.before_agent(state, None) + if updates: + state.update(updates) + resources = middleware._ensure_resources(state) # type: ignore[attr-defined] + + middleware._run_shell_tool(resources, {"command": "export FOO=bar"}, tool_call_id=None) + restart_message = middleware._run_shell_tool( + resources, {"restart": True}, tool_call_id=None + ) + assert "restarted" in restart_message.lower() + resources = middleware._ensure_resources(state) # reacquire after restart + result = middleware._run_shell_tool( + resources, {"command": "echo ${FOO:-unset}"}, tool_call_id=None + ) + assert "unset" in result + finally: + updates = middleware.after_agent(state, None) + if updates: + state.update(updates) + + +def test_truncation_indicator_present(tmp_path: Path) -> None: + policy = HostExecutionPolicy(max_output_lines=5, command_timeout=5.0) + middleware = ShellToolMiddleware(workspace_root=tmp_path / "workspace", execution_policy=policy) + try: + state: AgentState = _empty_state() + updates = middleware.before_agent(state, None) + if updates: + state.update(updates) + resources = middleware._ensure_resources(state) # type: ignore[attr-defined] + result = middleware._run_shell_tool(resources, {"command": "seq 1 20"}, tool_call_id=None) + assert "Output truncated" in result + finally: + updates = middleware.after_agent(state, None) + if updates: + state.update(updates) + + +def test_timeout_returns_error(tmp_path: Path) -> None: + policy = HostExecutionPolicy(command_timeout=0.5) + middleware = ShellToolMiddleware(workspace_root=tmp_path / "workspace", execution_policy=policy) + try: + state: AgentState = _empty_state() + updates = middleware.before_agent(state, None) + if updates: + state.update(updates) + resources = middleware._ensure_resources(state) # type: ignore[attr-defined] + start = time.monotonic() + result = middleware._run_shell_tool(resources, {"command": "sleep 2"}, tool_call_id=None) + elapsed = time.monotonic() - start + assert elapsed < policy.command_timeout + 2.0 + assert "timed out" in result.lower() + finally: + updates = middleware.after_agent(state, None) + if updates: + state.update(updates) + + +def test_redaction_policy_applies(tmp_path: Path) -> None: + middleware = ShellToolMiddleware( + workspace_root=tmp_path / "workspace", + redaction_rules=(RedactionRule(pii_type="email", strategy="redact"),), + ) + try: + state: AgentState = _empty_state() + updates = middleware.before_agent(state, None) + if updates: + state.update(updates) + resources = middleware._ensure_resources(state) # type: ignore[attr-defined] + message = middleware._run_shell_tool( + resources, + {"command": "printf 'Contact: user@example.com\\n'"}, + tool_call_id=None, + ) + assert "[REDACTED_EMAIL]" in message + assert 
"user@example.com" not in message + finally: + updates = middleware.after_agent(state, None) + if updates: + state.update(updates) + + +def test_startup_and_shutdown_commands(tmp_path: Path) -> None: + workspace = tmp_path / "workspace" + middleware = ShellToolMiddleware( + workspace_root=workspace, + startup_commands=("touch startup.txt",), + shutdown_commands=("touch shutdown.txt",), + ) + try: + state: AgentState = _empty_state() + updates = middleware.before_agent(state, None) + if updates: + state.update(updates) + assert (workspace / "startup.txt").exists() + finally: + updates = middleware.after_agent(state, None) + if updates: + state.update(updates) + assert (workspace / "shutdown.txt").exists() + + +def test_session_resources_finalizer_cleans_up(tmp_path: Path) -> None: + policy = HostExecutionPolicy(termination_timeout=0.1) + + class DummySession: + def __init__(self) -> None: + self.stopped: bool = False + + def stop(self, timeout: float) -> None: # noqa: ARG002 + self.stopped = True + + session = DummySession() + tempdir = tempfile.TemporaryDirectory(dir=tmp_path) + tempdir_path = Path(tempdir.name) + resources = _SessionResources(session=session, tempdir=tempdir, policy=policy) # type: ignore[arg-type] + finalizer = resources._finalizer + + # Drop our last strong reference and force collection. + del resources + gc.collect() + + assert not finalizer.alive + assert session.stopped + assert not tempdir_path.exists() diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_structured_output_retry.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_structured_output_retry.py new file mode 100644 index 00000000000..a04f670ad4a --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_structured_output_retry.py @@ -0,0 +1,369 @@ +"""Tests for StructuredOutputRetryMiddleware functionality.""" + +from collections.abc import Callable + +import pytest +from langchain_core.messages import HumanMessage +from langchain_core.tools import tool +from langgraph.checkpoint.memory import InMemorySaver +from pydantic import BaseModel + +from langchain.agents import create_agent +from langchain.agents.middleware.types import ( + AgentMiddleware, + ModelRequest, + ModelResponse, +) +from langchain.agents.structured_output import StructuredOutputError, ToolStrategy +from tests.unit_tests.agents.model import FakeToolCallingModel + + +class StructuredOutputRetryMiddleware(AgentMiddleware): + """Retries model calls when structured output parsing fails.""" + + def __init__(self, max_retries: int) -> None: + """Initialize the structured output retry middleware. + + Args: + max_retries: Maximum number of retry attempts. + """ + self.max_retries = max_retries + + def wrap_model_call( + self, request: ModelRequest, handler: Callable[[ModelRequest], ModelResponse] + ) -> ModelResponse: + """Intercept and control model execution via handler callback. + + Args: + request: The model request containing messages and configuration. + handler: The function to call the model. + + Returns: + The model response. + + Raises: + StructuredOutputError: If max retries exceeded without success. 
+ """ + for attempt in range(self.max_retries + 1): + try: + return handler(request) + except StructuredOutputError as exc: + if attempt == self.max_retries: + raise + + # Include both the AI message and error in a single human message + # to maintain valid chat history alternation + ai_content = exc.ai_message.content + error_message = ( + f"Your previous response was:\n{ai_content}\n\n" + f"Error: {exc}. Please try again with a valid response." + ) + request.messages.append(HumanMessage(content=error_message)) + + # This should never be reached, but satisfies type checker + return handler(request) + + +class WeatherReport(BaseModel): + """Weather report schema for testing.""" + + temperature: float + conditions: str + + +@tool +def get_weather(city: str) -> str: + """Get the weather for a given city. + + Args: + city: The city to get weather for. + + Returns: + Weather information for the city. + """ + return f"The weather in {city} is sunny and 72 degrees." + + +def test_structured_output_retry_first_attempt_invalid() -> None: + """Test structured output retry when first two attempts have invalid output.""" + # First two attempts have invalid tool arguments, third attempt succeeds + # The model will call the WeatherReport structured output tool + tool_calls = [ + # First attempt - invalid: wrong type for temperature + [ + { + "name": "WeatherReport", + "id": "1", + "args": {"temperature": "not-a-float", "conditions": "sunny"}, + } + ], + # Second attempt - invalid: missing required field + [{"name": "WeatherReport", "id": "2", "args": {"temperature": 72.5}}], + # Third attempt - valid + [ + { + "name": "WeatherReport", + "id": "3", + "args": {"temperature": 72.5, "conditions": "sunny"}, + } + ], + ] + + model = FakeToolCallingModel(tool_calls=tool_calls) + retry_middleware = StructuredOutputRetryMiddleware(max_retries=2) + + agent = create_agent( + model=model, + tools=[get_weather], + middleware=[retry_middleware], + response_format=ToolStrategy(schema=WeatherReport, handle_errors=False), + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("What's the weather in Tokyo?")]}, + {"configurable": {"thread_id": "test"}}, + ) + + # Verify we got a structured response + assert "structured_response" in result + structured = result["structured_response"] + assert isinstance(structured, WeatherReport) + assert structured.temperature == 72.5 + assert structured.conditions == "sunny" + + # Verify the model was called 3 times (initial + 2 retries) + assert model.index == 3 + + +def test_structured_output_retry_exceeds_max_retries() -> None: + """Test structured output retry raises error when max retries exceeded.""" + # All three attempts return invalid arguments + tool_calls = [ + [ + { + "name": "WeatherReport", + "id": "1", + "args": {"temperature": "invalid", "conditions": "sunny"}, + } + ], + [ + { + "name": "WeatherReport", + "id": "2", + "args": {"temperature": "also-invalid", "conditions": "cloudy"}, + } + ], + [ + { + "name": "WeatherReport", + "id": "3", + "args": {"temperature": "still-invalid", "conditions": "rainy"}, + } + ], + ] + + model = FakeToolCallingModel(tool_calls=tool_calls) + retry_middleware = StructuredOutputRetryMiddleware(max_retries=2) + + agent = create_agent( + model=model, + tools=[get_weather], + middleware=[retry_middleware], + response_format=ToolStrategy(schema=WeatherReport, handle_errors=False), + # No checkpointer - we expect this to fail + ) + + # Should raise StructuredOutputError after exhausting retries + with 
pytest.raises(StructuredOutputError): + agent.invoke( + {"messages": [HumanMessage("What's the weather in Tokyo?")]}, + ) + + # Verify the model was called 3 times (initial + 2 retries) + assert model.index == 3 + + +def test_structured_output_retry_succeeds_first_attempt() -> None: + """Test structured output retry when first attempt succeeds (no retry needed).""" + # First attempt returns valid structured output + tool_calls = [ + [ + { + "name": "WeatherReport", + "id": "1", + "args": {"temperature": 68.0, "conditions": "cloudy"}, + } + ], + ] + + model = FakeToolCallingModel(tool_calls=tool_calls) + retry_middleware = StructuredOutputRetryMiddleware(max_retries=2) + + agent = create_agent( + model=model, + tools=[get_weather], + middleware=[retry_middleware], + response_format=ToolStrategy(schema=WeatherReport, handle_errors=False), + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("What's the weather in Paris?")]}, + {"configurable": {"thread_id": "test"}}, + ) + + # Verify we got a structured response + assert "structured_response" in result + structured = result["structured_response"] + assert isinstance(structured, WeatherReport) + assert structured.temperature == 68.0 + assert structured.conditions == "cloudy" + + # Verify the model was called only once + assert model.index == 1 + + +def test_structured_output_retry_validation_error() -> None: + """Test structured output retry with schema validation errors.""" + # First attempt has wrong type, second has missing field, third succeeds + tool_calls = [ + [ + { + "name": "WeatherReport", + "id": "1", + "args": {"temperature": "seventy-two", "conditions": "sunny"}, + } + ], + [{"name": "WeatherReport", "id": "2", "args": {"temperature": 72.5}}], + [ + { + "name": "WeatherReport", + "id": "3", + "args": {"temperature": 72.5, "conditions": "partly cloudy"}, + } + ], + ] + + model = FakeToolCallingModel(tool_calls=tool_calls) + retry_middleware = StructuredOutputRetryMiddleware(max_retries=2) + + agent = create_agent( + model=model, + tools=[get_weather], + middleware=[retry_middleware], + response_format=ToolStrategy(schema=WeatherReport, handle_errors=False), + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("What's the weather in London?")]}, + {"configurable": {"thread_id": "test"}}, + ) + + # Verify we got a structured response + assert "structured_response" in result + structured = result["structured_response"] + assert isinstance(structured, WeatherReport) + assert structured.temperature == 72.5 + assert structured.conditions == "partly cloudy" + + # Verify the model was called 3 times + assert model.index == 3 + + +def test_structured_output_retry_zero_retries() -> None: + """Test structured output retry with max_retries=0 (no retries allowed).""" + # First attempt returns invalid arguments + tool_calls = [ + [ + { + "name": "WeatherReport", + "id": "1", + "args": {"temperature": "invalid", "conditions": "sunny"}, + } + ], + [ + { + "name": "WeatherReport", + "id": "2", + "args": {"temperature": 72.5, "conditions": "sunny"}, + } + ], # Would succeed if retried + ] + + model = FakeToolCallingModel(tool_calls=tool_calls) + retry_middleware = StructuredOutputRetryMiddleware(max_retries=0) + + agent = create_agent( + model=model, + tools=[get_weather], + middleware=[retry_middleware], + response_format=ToolStrategy(schema=WeatherReport, handle_errors=False), + checkpointer=InMemorySaver(), + ) + + # Should fail immediately without retrying + with 
pytest.raises(StructuredOutputError): + agent.invoke( + {"messages": [HumanMessage("What's the weather in Berlin?")]}, + {"configurable": {"thread_id": "test"}}, + ) + + # Verify the model was called only once (no retries) + assert model.index == 1 + + +def test_structured_output_retry_preserves_messages() -> None: + """Test structured output retry preserves error feedback in messages.""" + # First attempt invalid, second succeeds + tool_calls = [ + [ + { + "name": "WeatherReport", + "id": "1", + "args": {"temperature": "invalid", "conditions": "rainy"}, + } + ], + [ + { + "name": "WeatherReport", + "id": "2", + "args": {"temperature": 75.0, "conditions": "rainy"}, + } + ], + ] + + model = FakeToolCallingModel(tool_calls=tool_calls) + retry_middleware = StructuredOutputRetryMiddleware(max_retries=1) + + agent = create_agent( + model=model, + tools=[get_weather], + middleware=[retry_middleware], + response_format=ToolStrategy(schema=WeatherReport, handle_errors=False), + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("What's the weather in Seattle?")]}, + {"configurable": {"thread_id": "test"}}, + ) + + # Verify structured response is correct + assert "structured_response" in result + structured = result["structured_response"] + assert structured.temperature == 75.0 + assert structured.conditions == "rainy" + + # Verify messages include the retry feedback + messages = result["messages"] + human_messages = [m for m in messages if isinstance(m, HumanMessage)] + + # Should have at least 2 human messages: initial + retry feedback + assert len(human_messages) >= 2 + + # The retry feedback message should contain error information + retry_message = human_messages[-1] + assert "Error:" in retry_message.content + assert "Please try again" in retry_message.content diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_tool_emulator.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_tool_emulator.py new file mode 100644 index 00000000000..f522ca1648d --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_tool_emulator.py @@ -0,0 +1,625 @@ +"""Unit tests for tool emulator middleware.""" + +import typing +from collections.abc import Callable +from itertools import cycle +from typing import Any, Literal, Union + +from pydantic import BaseModel + +from langchain.agents import create_agent +from langchain.agents.middleware import LLMToolEmulator +from langchain.messages import AIMessage +from langchain_core.language_models import LanguageModelInput +from langchain_core.language_models.chat_models import BaseChatModel +from langchain_core.language_models.fake_chat_models import GenericFakeChatModel +from langchain_core.messages import BaseMessage, HumanMessage +from langchain_core.runnables import Runnable +from langchain_core.tools import BaseTool, tool + + +@tool +def get_weather(location: str) -> str: + """Get current weather for a location.""" + raise NotImplementedError("This tool should be emulated") + + +@tool +def search_web(query: str) -> str: + """Search the web for information.""" + raise NotImplementedError("This tool should be emulated") + + +@tool +def calculator(expression: str) -> str: + """Perform mathematical calculations.""" + # This tool executes normally (not emulated) + return f"Result: {eval(expression)}" + + +class FakeModel(GenericFakeChatModel): + """Fake model that supports bind_tools.""" + + tool_style: Literal["openai", "anthropic"] = "openai" + + def bind_tools( + self, + tools: 
typing.Sequence[Union[dict[str, Any], type[BaseModel], Callable, BaseTool]], + **kwargs: Any, + ) -> Runnable[LanguageModelInput, BaseMessage]: + if len(tools) == 0: + msg = "Must provide at least one tool" + raise ValueError(msg) + + tool_dicts = [] + for tool in tools: + if isinstance(tool, dict): + tool_dicts.append(tool) + continue + if not isinstance(tool, BaseTool): + msg = "Only BaseTool and dict are supported by FakeModel.bind_tools" + raise TypeError(msg) + + # NOTE: this is a simplified tool spec for testing purposes only + if self.tool_style == "openai": + tool_dicts.append( + { + "type": "function", + "function": { + "name": tool.name, + }, + } + ) + elif self.tool_style == "anthropic": + tool_dicts.append( + { + "name": tool.name, + } + ) + + return self.bind(tools=tool_dicts) + + +class FakeEmulatorModel(BaseChatModel): + """Fake model for emulating tool responses.""" + + responses: list[str] = ["Emulated response"] + response_index: int = 0 + + def _generate( + self, + messages: list[BaseMessage], + stop: list[str] | None = None, + run_manager: Any = None, + **kwargs: Any, + ) -> Any: + from langchain_core.outputs import ChatGeneration, ChatResult + + response = self.responses[self.response_index % len(self.responses)] + self.response_index += 1 + return ChatResult(generations=[ChatGeneration(message=AIMessage(content=response))]) + + async def _agenerate( + self, + messages: list[BaseMessage], + stop: list[str] | None = None, + run_manager: Any = None, + **kwargs: Any, + ) -> Any: + from langchain_core.outputs import ChatGeneration, ChatResult + + response = self.responses[self.response_index % len(self.responses)] + self.response_index += 1 + return ChatResult(generations=[ChatGeneration(message=AIMessage(content=response))]) + + @property + def _llm_type(self) -> str: + return "fake_emulator" + + +class TestLLMToolEmulatorBasic: + """Test basic tool emulator functionality.""" + + def test_emulates_specified_tool_by_name(self) -> None: + """Test that tools specified by name are emulated.""" + # Model that will call the tool + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "get_weather", "id": "1", "args": {"location": "Paris"}} + ], + ), + AIMessage(content="The weather has been retrieved."), + ] + ) + ) + + # Model that emulates tool responses + emulator_model = FakeEmulatorModel(responses=["Emulated: 72°F, sunny in Paris"]) + + emulator = LLMToolEmulator(tools=["get_weather"], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather, calculator], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("What's the weather in Paris?")]}) + + # Should complete without raising NotImplementedError + assert isinstance(result["messages"][-1], AIMessage) + + def test_emulates_specified_tool_by_instance(self) -> None: + """Test that tools specified by BaseTool instance are emulated.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[{"name": "search_web", "id": "1", "args": {"query": "Python"}}], + ), + AIMessage(content="Search results retrieved."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Emulated: Python is a programming language"]) + + emulator = LLMToolEmulator(tools=[search_web], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[search_web, calculator], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("Search for Python")]}) + + assert
isinstance(result["messages"][-1], AIMessage) + + def test_non_emulated_tools_execute_normally(self) -> None: + """Test that tools not in tools_to_emulate execute normally.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "calculator", "id": "1", "args": {"expression": "2+2"}} + ], + ), + AIMessage(content="The calculation is complete."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Should not be used"]) + + # Only emulate get_weather, not calculator + emulator = LLMToolEmulator(tools=["get_weather"], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather, calculator], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("Calculate 2+2")]}) + + # Calculator should execute normally and return Result: 4 + tool_messages = [ + msg for msg in result["messages"] if hasattr(msg, "name") and msg.name == "calculator" + ] + assert len(tool_messages) > 0 + assert "Result: 4" in tool_messages[0].content + + def test_empty_tools_to_emulate_does_nothing(self) -> None: + """Test that empty tools_to_emulate list means no emulation occurs.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "calculator", "id": "1", "args": {"expression": "5*5"}} + ], + ), + AIMessage(content="Done."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Should not be used"]) + + emulator = LLMToolEmulator(tools=[], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[calculator], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("Calculate 5*5")]}) + + # Calculator should execute normally + tool_messages = [ + msg for msg in result["messages"] if hasattr(msg, "name") and msg.name == "calculator" + ] + assert len(tool_messages) > 0 + assert "Result: 25" in tool_messages[0].content + + def test_none_tools_emulates_all(self) -> None: + """Test that None tools means ALL tools are emulated (emulate_all behavior).""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "get_weather", "id": "1", "args": {"location": "NYC"}} + ], + ), + AIMessage(content="Done."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Emulated: 65Β°F in NYC"]) + + # tools=None means emulate ALL tools + emulator = LLMToolEmulator(tools=None, model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("What's the weather in NYC?")]}) + + # Should complete without raising NotImplementedError + # (get_weather would normally raise NotImplementedError) + assert isinstance(result["messages"][-1], AIMessage) + + +class TestLLMToolEmulatorMultipleTools: + """Test emulating multiple tools.""" + + def test_emulate_multiple_tools(self) -> None: + """Test that multiple tools can be emulated.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "get_weather", "id": "1", "args": {"location": "Paris"}}, + {"name": "search_web", "id": "2", "args": {"query": "Paris"}}, + ], + ), + AIMessage(content="Both tools executed."), + ] + ) + ) + + emulator_model = FakeEmulatorModel( + responses=["Emulated weather: 20Β°C", "Emulated search results for Paris"] + ) + + emulator = LLMToolEmulator(tools=["get_weather", "search_web"], model=emulator_model) + + agent = create_agent( + model=agent_model, + 
tools=[get_weather, search_web, calculator], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("Get weather and search for Paris")]}) + + # Both tools should be emulated without raising NotImplementedError + assert isinstance(result["messages"][-1], AIMessage) + + def test_mixed_emulated_and_real_tools(self) -> None: + """Test that some tools can be emulated while others execute normally.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "get_weather", "id": "1", "args": {"location": "NYC"}}, + {"name": "calculator", "id": "2", "args": {"expression": "10*2"}}, + ], + ), + AIMessage(content="Both completed."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Emulated: 65°F in NYC"]) + + # Only emulate get_weather + emulator = LLMToolEmulator(tools=["get_weather"], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather, calculator], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("Weather and calculate")]}) + + tool_messages = [msg for msg in result["messages"] if hasattr(msg, "name")] + assert len(tool_messages) >= 2 + + # Calculator should have real result + calc_messages = [msg for msg in tool_messages if msg.name == "calculator"] + assert len(calc_messages) > 0 + assert "Result: 20" in calc_messages[0].content + + +class TestLLMToolEmulatorModelConfiguration: + """Test custom model configuration for emulation.""" + + def test_custom_model_string(self) -> None: + """Test passing a model string for emulation.""" + # Just test that initialization works - don't require anthropic package + try: + emulator = LLMToolEmulator( + tools=["get_weather"], model="anthropic:claude-sonnet-4-5-20250929" + ) + assert emulator.model is not None + assert "get_weather" in emulator.tools_to_emulate + except ImportError: + # If anthropic isn't installed, that's fine for this unit test + pass + + def test_custom_model_instance(self) -> None: + """Test passing a BaseChatModel instance for emulation.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[{"name": "search_web", "id": "1", "args": {"query": "test"}}], + ), + AIMessage(content="Done."), + ] + ) + ) + + custom_emulator_model = FakeEmulatorModel(responses=["Custom emulated response"]) + + emulator = LLMToolEmulator(tools=["search_web"], model=custom_emulator_model) + + agent = create_agent( + model=agent_model, + tools=[search_web], + middleware=[emulator], + ) + + result = agent.invoke({"messages": [HumanMessage("Search for test")]}) + + # Should use the custom model for emulation + assert isinstance(result["messages"][-1], AIMessage) + + def test_default_model_used_when_none(self) -> None: + """Test that default model is used when model=None.""" + # Just test that initialization doesn't fail - don't require anthropic package + # The actual default model requires langchain_anthropic which may not be installed + try: + emulator = LLMToolEmulator(tools=["get_weather"], model=None) + assert emulator.model is not None + except ImportError: + # If anthropic isn't installed, that's fine for this unit test + # The integration tests will verify the full functionality + pass + + +class TestLLMToolEmulatorAsync: + """Test async tool emulator functionality.""" + + async def test_async_emulates_specified_tool_by_name(self) -> None: + """Test that tools specified by name are emulated in async mode.""" + agent_model = FakeModel( + messages=cycle( + [ +
AIMessage( + content="", + tool_calls=[ + {"name": "get_weather", "id": "1", "args": {"location": "Paris"}} + ], + ), + AIMessage(content="The weather has been retrieved."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Emulated: 72°F, sunny in Paris"]) + + emulator = LLMToolEmulator(tools=["get_weather"], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather, calculator], + middleware=[emulator], + ) + + result = await agent.ainvoke({"messages": [HumanMessage("What's the weather in Paris?")]}) + + # Should complete without raising NotImplementedError + assert isinstance(result["messages"][-1], AIMessage) + + async def test_async_emulates_specified_tool_by_instance(self) -> None: + """Test that tools specified by BaseTool instance are emulated in async mode.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[{"name": "search_web", "id": "1", "args": {"query": "Python"}}], + ), + AIMessage(content="Search results retrieved."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Emulated: Python is a programming language"]) + + emulator = LLMToolEmulator(tools=[search_web], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[search_web, calculator], + middleware=[emulator], + ) + + result = await agent.ainvoke({"messages": [HumanMessage("Search for Python")]}) + + assert isinstance(result["messages"][-1], AIMessage) + + async def test_async_non_emulated_tools_execute_normally(self) -> None: + """Test that tools not in tools_to_emulate execute normally in async mode.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "calculator", "id": "1", "args": {"expression": "2+2"}} + ], + ), + AIMessage(content="The calculation is complete."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Should not be used"]) + + # Only emulate get_weather, not calculator + emulator = LLMToolEmulator(tools=["get_weather"], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather, calculator], + middleware=[emulator], + ) + + result = await agent.ainvoke({"messages": [HumanMessage("Calculate 2+2")]}) + + # Calculator should execute normally and return Result: 4 + tool_messages = [ + msg for msg in result["messages"] if hasattr(msg, "name") and msg.name == "calculator" + ] + assert len(tool_messages) > 0 + assert "Result: 4" in tool_messages[0].content + + async def test_async_none_tools_emulates_all(self) -> None: + """Test that None tools means ALL tools are emulated in async mode.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "get_weather", "id": "1", "args": {"location": "NYC"}} + ], + ), + AIMessage(content="Done."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Emulated: 65°F in NYC"]) + + # tools=None means emulate ALL tools + emulator = LLMToolEmulator(tools=None, model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather], + middleware=[emulator], + ) + + result = await agent.ainvoke({"messages": [HumanMessage("What's the weather in NYC?")]}) + + # Should complete without raising NotImplementedError + assert isinstance(result["messages"][-1], AIMessage) + + async def test_async_emulate_multiple_tools(self) -> None: + """Test that multiple tools can be emulated in async mode.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ +
{"name": "get_weather", "id": "1", "args": {"location": "Paris"}}, + {"name": "search_web", "id": "2", "args": {"query": "Paris"}}, + ], + ), + AIMessage(content="Both tools executed."), + ] + ) + ) + + emulator_model = FakeEmulatorModel( + responses=["Emulated weather: 20Β°C", "Emulated search results for Paris"] + ) + + emulator = LLMToolEmulator(tools=["get_weather", "search_web"], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather, search_web, calculator], + middleware=[emulator], + ) + + result = await agent.ainvoke( + {"messages": [HumanMessage("Get weather and search for Paris")]} + ) + + # Both tools should be emulated without raising NotImplementedError + assert isinstance(result["messages"][-1], AIMessage) + + async def test_async_mixed_emulated_and_real_tools(self) -> None: + """Test that some tools can be emulated while others execute normally in async mode.""" + agent_model = FakeModel( + messages=cycle( + [ + AIMessage( + content="", + tool_calls=[ + {"name": "get_weather", "id": "1", "args": {"location": "NYC"}}, + {"name": "calculator", "id": "2", "args": {"expression": "10*2"}}, + ], + ), + AIMessage(content="Both completed."), + ] + ) + ) + + emulator_model = FakeEmulatorModel(responses=["Emulated: 65Β°F in NYC"]) + + # Only emulate get_weather + emulator = LLMToolEmulator(tools=["get_weather"], model=emulator_model) + + agent = create_agent( + model=agent_model, + tools=[get_weather, calculator], + middleware=[emulator], + ) + + result = await agent.ainvoke({"messages": [HumanMessage("Weather and calculate")]}) + + tool_messages = [msg for msg in result["messages"] if hasattr(msg, "name")] + assert len(tool_messages) >= 2 + + # Calculator should have real result + calc_messages = [msg for msg in tool_messages if msg.name == "calculator"] + assert len(calc_messages) > 0 + assert "Result: 20" in calc_messages[0].content diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_tool_retry.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_tool_retry.py new file mode 100644 index 00000000000..88faa0deb15 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_tool_retry.py @@ -0,0 +1,895 @@ +"""Tests for ToolRetryMiddleware functionality.""" + +import asyncio +import time +from collections.abc import Callable + +import pytest +from langchain_core.messages import HumanMessage, ToolCall, ToolMessage +from langchain_core.tools import tool +from langgraph.checkpoint.memory import InMemorySaver + +from langchain.agents.factory import create_agent +from langchain.agents.middleware.tool_retry import ToolRetryMiddleware +from tests.unit_tests.agents.test_middleware_agent import FakeToolCallingModel + + +@tool +def working_tool(input: str) -> str: + """Tool that always succeeds.""" + return f"Success: {input}" + + +@tool +def failing_tool(input: str) -> str: + """Tool that always fails.""" + msg = f"Failed: {input}" + raise ValueError(msg) + + +class TemporaryFailureTool: + """Tool that fails a certain number of times before succeeding.""" + + def __init__(self, fail_count: int): + """Initialize with the number of times to fail. + + Args: + fail_count: Number of times to fail before succeeding. + """ + self.fail_count = fail_count + self.attempt = 0 + + def __call__(self, input: str) -> str: + """Execute the tool. + + Args: + input: Input string. + + Returns: + Success message if attempt >= fail_count. + + Raises: + ValueError: If attempt < fail_count. 
+ """ + self.attempt += 1 + if self.attempt <= self.fail_count: + msg = f"Temporary failure {self.attempt}" + raise ValueError(msg) + return f"Success after {self.attempt} attempts: {input}" + + +def test_tool_retry_initialization_defaults() -> None: + """Test ToolRetryMiddlewareinitialization with default values.""" + retry = ToolRetryMiddleware() + + assert retry.max_retries == 2 + assert retry._tool_filter is None + assert retry.tools == [] + assert retry.on_failure == "return_message" + assert retry.backoff_factor == 2.0 + assert retry.initial_delay == 1.0 + assert retry.max_delay == 60.0 + assert retry.jitter is True + + +def test_tool_retry_initialization_custom() -> None: + """Test ToolRetryMiddlewareinitialization with custom values.""" + retry = ToolRetryMiddleware( + max_retries=5, + tools=["tool1", "tool2"], + retry_on=(ValueError, RuntimeError), + on_failure="raise", + backoff_factor=1.5, + initial_delay=0.5, + max_delay=30.0, + jitter=False, + ) + + assert retry.max_retries == 5 + assert retry._tool_filter == ["tool1", "tool2"] + assert retry.tools == [] + assert retry.retry_on == (ValueError, RuntimeError) + assert retry.on_failure == "raise" + assert retry.backoff_factor == 1.5 + assert retry.initial_delay == 0.5 + assert retry.max_delay == 30.0 + assert retry.jitter is False + + +def test_tool_retry_initialization_with_base_tools() -> None: + """Test ToolRetryMiddleware initialization with BaseTool instances.""" + retry = ToolRetryMiddleware( + max_retries=3, + tools=[working_tool, failing_tool], # Pass BaseTool instances + ) + + assert retry.max_retries == 3 + # Should extract names from BaseTool instances + assert retry._tool_filter == ["working_tool", "failing_tool"] + assert retry.tools == [] + + +def test_tool_retry_initialization_with_mixed_tools() -> None: + """Test ToolRetryMiddleware initialization with mixed tool types.""" + retry = ToolRetryMiddleware( + max_retries=2, + tools=[working_tool, "failing_tool"], # Mix of BaseTool and string + ) + + assert retry.max_retries == 2 + # Should handle both BaseTool instances and strings + assert retry._tool_filter == ["working_tool", "failing_tool"] + assert retry.tools == [] + + +def test_tool_retry_invalid_max_retries() -> None: + """Test ToolRetryMiddlewareraises error for invalid max_retries.""" + with pytest.raises(ValueError, match="max_retries must be >= 0"): + ToolRetryMiddleware(max_retries=-1) + + +def test_tool_retry_invalid_initial_delay() -> None: + """Test ToolRetryMiddlewareraises error for invalid initial_delay.""" + with pytest.raises(ValueError, match="initial_delay must be >= 0"): + ToolRetryMiddleware(initial_delay=-1.0) + + +def test_tool_retry_invalid_max_delay() -> None: + """Test ToolRetryMiddlewareraises error for invalid max_delay.""" + with pytest.raises(ValueError, match="max_delay must be >= 0"): + ToolRetryMiddleware(max_delay=-1.0) + + +def test_tool_retry_invalid_backoff_factor() -> None: + """Test ToolRetryMiddlewareraises error for invalid backoff_factor.""" + with pytest.raises(ValueError, match="backoff_factor must be >= 0"): + ToolRetryMiddleware(backoff_factor=-1.0) + + +def test_tool_retry_working_tool_no_retry_needed() -> None: + """Test ToolRetryMiddlewarewith a working tool (no retry needed).""" + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="working_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware(max_retries=2, initial_delay=0.01, jitter=False) + + agent = create_agent( + model=model, + tools=[working_tool], + 
middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use working tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + assert "Success: test" in tool_messages[0].content + assert tool_messages[0].status != "error" + + +def test_tool_retry_failing_tool_returns_message() -> None: + """Test ToolRetryMiddlewarewith failing tool returns error message.""" + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=2, + initial_delay=0.01, + jitter=False, + on_failure="return_message", + ) + + agent = create_agent( + model=model, + tools=[failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + # Should contain error message with tool name and attempts + assert "failing_tool" in tool_messages[0].content + assert "3 attempts" in tool_messages[0].content + assert "ValueError" in tool_messages[0].content + assert tool_messages[0].status == "error" + + +def test_tool_retry_failing_tool_raises() -> None: + """Test ToolRetryMiddlewarewith on_failure='raise' re-raises exception.""" + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=2, + initial_delay=0.01, + jitter=False, + on_failure="raise", + ) + + agent = create_agent( + model=model, + tools=[failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + # Should raise the ValueError from the tool + with pytest.raises(ValueError, match="Failed: test"): + agent.invoke( + {"messages": [HumanMessage("Use failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + +def test_tool_retry_custom_failure_formatter() -> None: + """Test ToolRetryMiddlewarewith custom failure message formatter.""" + + def custom_formatter(exc: Exception) -> str: + return f"Custom error: {type(exc).__name__}" + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=1, + initial_delay=0.01, + jitter=False, + on_failure=custom_formatter, + ) + + agent = create_agent( + model=model, + tools=[failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + assert "Custom error: ValueError" in tool_messages[0].content + + +def test_tool_retry_succeeds_after_retries() -> None: + """Test ToolRetryMiddlewaresucceeds after temporary failures.""" + temp_fail = TemporaryFailureTool(fail_count=2) + + @tool + def temp_failing_tool(input: str) -> str: + """Tool that fails temporarily.""" + return temp_fail(input) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="temp_failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=3, + initial_delay=0.01, + jitter=False, + ) + + 
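+    # fail_count=2 makes the first two calls raise; with max_retries=3 the tool may
+    # be attempted up to 4 times, so the third attempt is expected to succeed.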
agent = create_agent( + model=model, + tools=[temp_failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use temp failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + # Should succeed on 3rd attempt + assert "Success after 3 attempts" in tool_messages[0].content + assert tool_messages[0].status != "error" + + +def test_tool_retry_specific_tools_only() -> None: + """Test ToolRetryMiddlewareonly applies to specific tools.""" + model = FakeToolCallingModel( + tool_calls=[ + [ + ToolCall(name="failing_tool", args={"input": "test1"}, id="1"), + ToolCall(name="working_tool", args={"input": "test2"}, id="2"), + ], + [], + ] + ) + + # Only retry failing_tool + retry = ToolRetryMiddleware( + max_retries=2, + tools=["failing_tool"], + initial_delay=0.01, + jitter=False, + on_failure="return_message", + ) + + agent = create_agent( + model=model, + tools=[failing_tool, working_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use both tools")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 2 + + # failing_tool should have error message + failing_msg = next(m for m in tool_messages if m.name == "failing_tool") + assert failing_msg.status == "error" + assert "3 attempts" in failing_msg.content + + # working_tool should succeed normally (no retry applied) + working_msg = next(m for m in tool_messages if m.name == "working_tool") + assert "Success: test2" in working_msg.content + assert working_msg.status != "error" + + +def test_tool_retry_specific_tools_with_base_tool() -> None: + """Test ToolRetryMiddleware accepts BaseTool instances for filtering.""" + model = FakeToolCallingModel( + tool_calls=[ + [ + ToolCall(name="failing_tool", args={"input": "test1"}, id="1"), + ToolCall(name="working_tool", args={"input": "test2"}, id="2"), + ], + [], + ] + ) + + # Only retry failing_tool, passed as BaseTool instance + retry = ToolRetryMiddleware( + max_retries=2, + tools=[failing_tool], # Pass BaseTool instance + initial_delay=0.01, + jitter=False, + on_failure="return_message", + ) + + agent = create_agent( + model=model, + tools=[failing_tool, working_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use both tools")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 2 + + # failing_tool should have error message (with retries) + failing_msg = next(m for m in tool_messages if m.name == "failing_tool") + assert failing_msg.status == "error" + assert "3 attempts" in failing_msg.content + + # working_tool should succeed normally (no retry applied) + working_msg = next(m for m in tool_messages if m.name == "working_tool") + assert "Success: test2" in working_msg.content + assert working_msg.status != "error" + + +def test_tool_retry_specific_exceptions() -> None: + """Test ToolRetryMiddlewareonly retries specific exception types.""" + + @tool + def value_error_tool(input: str) -> str: + """Tool that raises ValueError.""" + msg = f"ValueError: {input}" + raise ValueError(msg) + + @tool + def runtime_error_tool(input: str) -> str: + """Tool that 
raises RuntimeError.""" + msg = f"RuntimeError: {input}" + raise RuntimeError(msg) + + model = FakeToolCallingModel( + tool_calls=[ + [ + ToolCall(name="value_error_tool", args={"input": "test1"}, id="1"), + ToolCall(name="runtime_error_tool", args={"input": "test2"}, id="2"), + ], + [], + ] + ) + + # Only retry ValueError + retry = ToolRetryMiddleware( + max_retries=2, + retry_on=(ValueError,), + initial_delay=0.01, + jitter=False, + on_failure="return_message", + ) + + agent = create_agent( + model=model, + tools=[value_error_tool, runtime_error_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use both tools")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 2 + + # ValueError should be retried (3 attempts) + value_error_msg = next(m for m in tool_messages if m.name == "value_error_tool") + assert "3 attempts" in value_error_msg.content + + # RuntimeError should fail immediately (1 attempt only) + runtime_error_msg = next(m for m in tool_messages if m.name == "runtime_error_tool") + assert "1 attempt" in runtime_error_msg.content + + +def test_tool_retry_custom_exception_filter() -> None: + """Test ToolRetryMiddlewarewith custom exception filter function.""" + + class CustomError(Exception): + """Custom exception with retry_me attribute.""" + + def __init__(self, message: str, retry_me: bool): + """Initialize custom error. + + Args: + message: Error message. + retry_me: Whether this error should be retried. + """ + super().__init__(message) + self.retry_me = retry_me + + attempt_count = {"value": 0} + + @tool + def custom_error_tool(input: str) -> str: + """Tool that raises CustomError.""" + attempt_count["value"] += 1 + if attempt_count["value"] == 1: + raise CustomError("Retryable error", retry_me=True) + raise CustomError("Non-retryable error", retry_me=False) + + def should_retry(exc: Exception) -> bool: + return isinstance(exc, CustomError) and exc.retry_me + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="custom_error_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=3, + retry_on=should_retry, + initial_delay=0.01, + jitter=False, + on_failure="return_message", + ) + + agent = create_agent( + model=model, + tools=[custom_error_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use custom error tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + + # Should retry once (attempt 1 with retry_me=True), then fail on attempt 2 (retry_me=False) + assert attempt_count["value"] == 2 + assert "2 attempts" in tool_messages[0].content + + +def test_tool_retry_backoff_timing() -> None: + """Test ToolRetryMiddlewareapplies correct backoff delays.""" + temp_fail = TemporaryFailureTool(fail_count=3) + + @tool + def temp_failing_tool(input: str) -> str: + """Tool that fails temporarily.""" + return temp_fail(input) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="temp_failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=3, + initial_delay=0.1, + backoff_factor=2.0, + jitter=False, + ) + + agent = create_agent( + model=model, + tools=[temp_failing_tool], + middleware=[retry], + 
checkpointer=InMemorySaver(), + ) + + start_time = time.time() + result = agent.invoke( + {"messages": [HumanMessage("Use temp failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + elapsed = time.time() - start_time + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + + # Expected delays: 0.1 + 0.2 + 0.4 = 0.7 seconds + # Allow some margin for execution time + assert elapsed >= 0.6, f"Expected at least 0.6s, got {elapsed}s" + + +def test_tool_retry_constant_backoff() -> None: + """Test ToolRetryMiddlewarewith constant backoff (backoff_factor=0).""" + temp_fail = TemporaryFailureTool(fail_count=2) + + @tool + def temp_failing_tool(input: str) -> str: + """Tool that fails temporarily.""" + return temp_fail(input) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="temp_failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=2, + initial_delay=0.1, + backoff_factor=0.0, # Constant backoff + jitter=False, + ) + + agent = create_agent( + model=model, + tools=[temp_failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + start_time = time.time() + result = agent.invoke( + {"messages": [HumanMessage("Use temp failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + elapsed = time.time() - start_time + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + + # Expected delays: 0.1 + 0.1 = 0.2 seconds (constant) + assert elapsed >= 0.15, f"Expected at least 0.15s, got {elapsed}s" + assert elapsed < 0.5, f"Expected less than 0.5s (exponential would be longer), got {elapsed}s" + + +def test_tool_retry_max_delay_cap() -> None: + """Test ToolRetryMiddlewarecaps delay at max_delay.""" + retry = ToolRetryMiddleware( + max_retries=5, + initial_delay=1.0, + backoff_factor=10.0, # Very aggressive backoff + max_delay=2.0, # Cap at 2 seconds + jitter=False, + ) + + # Test delay calculation + delay_0 = retry._calculate_delay(0) # 1.0 + delay_1 = retry._calculate_delay(1) # 10.0 -> capped to 2.0 + delay_2 = retry._calculate_delay(2) # 100.0 -> capped to 2.0 + + assert delay_0 == 1.0 + assert delay_1 == 2.0 + assert delay_2 == 2.0 + + +def test_tool_retry_jitter_variation() -> None: + """Test ToolRetryMiddlewareadds jitter to delays.""" + retry = ToolRetryMiddleware( + max_retries=1, + initial_delay=1.0, + backoff_factor=1.0, + jitter=True, + ) + + # Generate multiple delays and ensure they vary + delays = [retry._calculate_delay(0) for _ in range(10)] + + # All delays should be within Β±25% of 1.0 (i.e., between 0.75 and 1.25) + for delay in delays: + assert 0.75 <= delay <= 1.25 + + # Delays should vary (not all the same) + assert len(set(delays)) > 1 + + +@pytest.mark.asyncio +async def test_tool_retry_async_working_tool() -> None: + """Test ToolRetryMiddlewarewith async execution and working tool.""" + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="working_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware(max_retries=2, initial_delay=0.01, jitter=False) + + agent = create_agent( + model=model, + tools=[working_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = await agent.ainvoke( + {"messages": [HumanMessage("Use working tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + 
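+    # The async path should mirror the sync behavior: one successful ToolMessage and
+    # no retry attempts for a tool that does not raise.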
assert "Success: test" in tool_messages[0].content + + +@pytest.mark.asyncio +async def test_tool_retry_async_failing_tool() -> None: + """Test ToolRetryMiddlewarewith async execution and failing tool.""" + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=2, + initial_delay=0.01, + jitter=False, + on_failure="return_message", + ) + + agent = create_agent( + model=model, + tools=[failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = await agent.ainvoke( + {"messages": [HumanMessage("Use failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + assert "failing_tool" in tool_messages[0].content + assert "3 attempts" in tool_messages[0].content + assert tool_messages[0].status == "error" + + +@pytest.mark.asyncio +async def test_tool_retry_async_succeeds_after_retries() -> None: + """Test ToolRetryMiddlewareasync execution succeeds after temporary failures.""" + temp_fail = TemporaryFailureTool(fail_count=2) + + @tool + def temp_failing_tool(input: str) -> str: + """Tool that fails temporarily.""" + return temp_fail(input) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="temp_failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=3, + initial_delay=0.01, + jitter=False, + ) + + agent = create_agent( + model=model, + tools=[temp_failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + result = await agent.ainvoke( + {"messages": [HumanMessage("Use temp failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + assert "Success after 3 attempts" in tool_messages[0].content + + +@pytest.mark.asyncio +async def test_tool_retry_async_backoff_timing() -> None: + """Test ToolRetryMiddlewareasync applies correct backoff delays.""" + temp_fail = TemporaryFailureTool(fail_count=3) + + @tool + def temp_failing_tool(input: str) -> str: + """Tool that fails temporarily.""" + return temp_fail(input) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="temp_failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=3, + initial_delay=0.1, + backoff_factor=2.0, + jitter=False, + ) + + agent = create_agent( + model=model, + tools=[temp_failing_tool], + middleware=[retry], + checkpointer=InMemorySaver(), + ) + + start_time = time.time() + result = await agent.ainvoke( + {"messages": [HumanMessage("Use temp failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + elapsed = time.time() - start_time + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + + # Expected delays: 0.1 + 0.2 + 0.4 = 0.7 seconds + assert elapsed >= 0.6, f"Expected at least 0.6s, got {elapsed}s" + + +def test_tool_retry_zero_retries() -> None: + """Test ToolRetryMiddlewarewith max_retries=0 (no retries).""" + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="failing_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware( + max_retries=0, # No retries + on_failure="return_message", + ) + + agent = create_agent( + model=model, + tools=[failing_tool], + middleware=[retry], + 
checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use failing tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + # Should fail after 1 attempt (no retries) + assert "1 attempt" in tool_messages[0].content + assert tool_messages[0].status == "error" + + +def test_tool_retry_multiple_middleware_composition() -> None: + """Test ToolRetryMiddlewarecomposes correctly with other middleware.""" + call_log = [] + + # Custom middleware that logs calls + from langchain.agents.middleware.types import wrap_tool_call + + @wrap_tool_call + def logging_middleware( + request: "ToolCallRequest", handler: Callable + ) -> "ToolMessage | Command": + call_log.append(f"before_{request.tool.name}") + response = handler(request) + call_log.append(f"after_{request.tool.name}") + return response + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="working_tool", args={"input": "test"}, id="1")], + [], + ] + ) + + retry = ToolRetryMiddleware(max_retries=2, initial_delay=0.01, jitter=False) + + agent = create_agent( + model=model, + tools=[working_tool], + middleware=[logging_middleware, retry], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Use working tool")]}, + {"configurable": {"thread_id": "test"}}, + ) + + # Both middleware should be called + assert call_log == ["before_working_tool", "after_working_tool"] + + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + assert "Success: test" in tool_messages[0].content diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_decorator.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_decorator.py index 17b201d3132..29194b484dd 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_decorator.py +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_decorator.py @@ -77,7 +77,9 @@ class TestOnModelCallDecorator: @wrap_model_call def uppercase_responses(request, handler): result = handler(request) - return AIMessage(content=result.content.upper()) + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] + return AIMessage(content=ai_message.content.upper()) model = GenericFakeChatModel(messages=iter([AIMessage(content="hello world")])) agent = create_agent(model=model, middleware=[uppercase_responses]) @@ -336,9 +338,11 @@ class TestOnModelCallDecorator: @wrap_model_call def multi_transform(request, handler): result = handler(request) + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] # First transformation: uppercase - content = result.content.upper() + content = ai_message.content.upper() # Second transformation: add prefix and suffix content = f"[START] {content} [END]" return AIMessage(content=content) diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_middleware.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_middleware.py index 893432bd516..52b5d613a40 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_middleware.py +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_model_call_middleware.py @@ -153,7 +153,9 @@ class TestResponseRewriting: class UppercaseMiddleware(AgentMiddleware): def 
wrap_model_call(self, request, handler): result = handler(request) - return AIMessage(content=result.content.upper()) + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] + return AIMessage(content=ai_message.content.upper()) model = GenericFakeChatModel(messages=iter([AIMessage(content="hello world")])) agent = create_agent(model=model, middleware=[UppercaseMiddleware()]) @@ -172,7 +174,9 @@ class TestResponseRewriting: def wrap_model_call(self, request, handler): result = handler(request) - return AIMessage(content=f"{self.prefix}{result.content}") + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] + return AIMessage(content=f"{self.prefix}{ai_message.content}") model = GenericFakeChatModel(messages=iter([AIMessage(content="Response")])) agent = create_agent(model=model, middleware=[PrefixMiddleware(prefix="[BOT]: ")]) @@ -376,7 +380,6 @@ class TestStateAndRuntime: # Access state from request state_values.append( { - "thread_model_call_count": request.state.get("thread_model_call_count", 0), "messages_count": len(request.state.get("messages", [])), } ) @@ -388,7 +391,7 @@ class TestStateAndRuntime: result = agent.invoke({"messages": [HumanMessage("Test")]}) assert len(state_values) == 1 - assert state_values[0]["messages_count"] == 1 # Just the human message + assert state_values[0]["messages_count"] == 1 # Just The HumanMessage assert result["messages"][1].content == "Response" def test_retry_with_state_tracking(self) -> None: @@ -511,12 +514,16 @@ class TestMiddlewareComposition: class PrefixMiddleware(AgentMiddleware): def wrap_model_call(self, request, handler): result = handler(request) - return AIMessage(content=f"[PREFIX] {result.content}") + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] + return AIMessage(content=f"[PREFIX] {ai_message.content}") class SuffixMiddleware(AgentMiddleware): def wrap_model_call(self, request, handler): result = handler(request) - return AIMessage(content=f"{result.content} [SUFFIX]") + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] + return AIMessage(content=f"{ai_message.content} [SUFFIX]") model = GenericFakeChatModel(messages=iter([AIMessage(content="Middle")])) # Prefix is outer, Suffix is inner @@ -551,7 +558,9 @@ class TestMiddlewareComposition: class UppercaseMiddleware(AgentMiddleware): def wrap_model_call(self, request, handler): result = handler(request) - return AIMessage(content=result.content.upper()) + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] + return AIMessage(content=ai_message.content.upper()) model = FailOnceThenSucceed(messages=iter([AIMessage(content="success")])) # Retry outer, Uppercase inner diff --git a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_tool_call_decorator.py b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_tool_call_decorator.py index 2f33e036de0..ca92bc65b78 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_tool_call_decorator.py +++ b/libs/langchain_v1/tests/unit_tests/agents/middleware/test_wrap_tool_call_decorator.py @@ -4,7 +4,6 @@ These tests verify the decorator-based approach for wrapping tool calls, focusing on the handler pattern (not generators). 
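+
+A minimal sketch of the wrapped-handler pattern these tests exercise (the function
+name `passthrough` is illustrative only, not part of this suite):
+
+    @wrap_tool_call
+    def passthrough(request, handler):
+        # Delegate to the wrapped tool call and return its ToolMessage unchanged.
+        return handler(request)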
""" -import pytest from collections.abc import Callable from langchain_core.messages import HumanMessage, ToolCall, ToolMessage @@ -14,7 +13,7 @@ from langgraph.types import Command from langchain.agents.factory import create_agent from langchain.agents.middleware.types import wrap_tool_call -from langchain.tools.tool_node import ToolCallRequest +from langchain.agents.middleware.types import ToolCallRequest from tests.unit_tests.agents.test_middleware_agent import FakeToolCallingModel @@ -210,7 +209,7 @@ def test_wrap_tool_call_access_runtime() -> None: # Middleware should have accessed runtime assert len(runtime_data) >= 1 - assert runtime_data[0] == "Runtime" + assert runtime_data[0] == "ToolRuntime" def test_wrap_tool_call_retry_on_error() -> None: diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_context_editing_middleware.py b/libs/langchain_v1/tests/unit_tests/agents/test_context_editing_middleware.py index 3ba8bb285bd..33a05730ce0 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_context_editing_middleware.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_context_editing_middleware.py @@ -13,6 +13,7 @@ from langchain_core.language_models.fake_chat_models import FakeChatModel from langchain_core.messages import ( AIMessage, MessageLikeRepresentation, + SystemMessage, ToolMessage, ) from langgraph.runtime import Runtime @@ -233,3 +234,292 @@ def test_exclude_tools_prevents_clearing() -> None: def _fake_runtime() -> Runtime: return cast(Runtime, object()) + + +async def test_no_edit_when_below_trigger_async() -> None: + """Test async version of context editing with no edit when below trigger.""" + tool_call_id = "call-1" + ai_message = AIMessage( + content="", + tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], + ) + tool_message = ToolMessage(content="12345", tool_call_id=tool_call_id) + + state, request = _make_state_and_request([ai_message, tool_message]) + middleware = ContextEditingMiddleware( + edits=[ClearToolUsesEdit(trigger=50)], + ) + + async def mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + # Call awrap_model_call which modifies the request + await middleware.awrap_model_call(request, mock_handler) + + # The request should have been modified in place + assert request.messages[0].content == "" + assert request.messages[1].content == "12345" + assert state["messages"] == request.messages + + +async def test_clear_tool_outputs_and_inputs_async() -> None: + """Test async version of clearing tool outputs and inputs.""" + tool_call_id = "call-2" + ai_message = AIMessage( + content=[ + {"type": "tool_call", "id": tool_call_id, "name": "search", "args": {"query": "foo"}} + ], + tool_calls=[{"id": tool_call_id, "name": "search", "args": {"query": "foo"}}], + ) + tool_message = ToolMessage(content="x" * 200, tool_call_id=tool_call_id) + + state, request = _make_state_and_request([ai_message, tool_message]) + + edit = ClearToolUsesEdit( + trigger=50, + clear_at_least=10, + clear_tool_inputs=True, + keep=0, + placeholder="[cleared output]", + ) + middleware = ContextEditingMiddleware(edits=[edit]) + + async def mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + # Call awrap_model_call which modifies the request + await middleware.awrap_model_call(request, mock_handler) + + cleared_ai = request.messages[0] + cleared_tool = request.messages[1] + + assert isinstance(cleared_tool, ToolMessage) + assert cleared_tool.content == "[cleared output]" + assert 
cleared_tool.response_metadata["context_editing"]["cleared"] is True + + assert isinstance(cleared_ai, AIMessage) + assert cleared_ai.tool_calls[0]["args"] == {} + context_meta = cleared_ai.response_metadata.get("context_editing") + assert context_meta is not None + assert context_meta["cleared_tool_inputs"] == [tool_call_id] + + assert state["messages"] == request.messages + + +async def test_respects_keep_last_tool_results_async() -> None: + """Test async version respects keep parameter for last tool results.""" + conversation: list[AIMessage | ToolMessage] = [] + edits = [ + ("call-a", "tool-output-a" * 5), + ("call-b", "tool-output-b" * 5), + ("call-c", "tool-output-c" * 5), + ] + + for call_id, text in edits: + conversation.append( + AIMessage( + content="", + tool_calls=[{"id": call_id, "name": "tool", "args": {"input": call_id}}], + ) + ) + conversation.append(ToolMessage(content=text, tool_call_id=call_id)) + + state, request = _make_state_and_request(conversation) + + middleware = ContextEditingMiddleware( + edits=[ + ClearToolUsesEdit( + trigger=50, + keep=1, + placeholder="[cleared]", + ) + ], + token_count_method="model", + ) + + async def mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + # Call awrap_model_call which modifies the request + await middleware.awrap_model_call(request, mock_handler) + + cleared_messages = [ + msg + for msg in request.messages + if isinstance(msg, ToolMessage) and msg.content == "[cleared]" + ] + + assert len(cleared_messages) == 2 + assert isinstance(request.messages[-1], ToolMessage) + assert request.messages[-1].content != "[cleared]" + + +async def test_exclude_tools_prevents_clearing_async() -> None: + """Test async version of excluding tools from clearing.""" + search_call = "call-search" + calc_call = "call-calc" + + state, request = _make_state_and_request( + [ + AIMessage( + content="", + tool_calls=[{"id": search_call, "name": "search", "args": {"query": "foo"}}], + ), + ToolMessage(content="search-results" * 20, tool_call_id=search_call), + AIMessage( + content="", + tool_calls=[{"id": calc_call, "name": "calculator", "args": {"a": 1, "b": 2}}], + ), + ToolMessage(content="42", tool_call_id=calc_call), + ] + ) + + middleware = ContextEditingMiddleware( + edits=[ + ClearToolUsesEdit( + trigger=50, + clear_at_least=10, + keep=0, + exclude_tools=("search",), + placeholder="[cleared]", + ) + ], + ) + + async def mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + # Call awrap_model_call which modifies the request + await middleware.awrap_model_call(request, mock_handler) + + search_tool = request.messages[1] + calc_tool = request.messages[3] + + assert isinstance(search_tool, ToolMessage) + assert search_tool.content == "search-results" * 20 + + assert isinstance(calc_tool, ToolMessage) + assert calc_tool.content == "[cleared]" + + +# ============================================================================== +# SystemMessage Tests +# ============================================================================== + + +def test_handles_system_message_prompt() -> None: + """Test that middleware handles SystemMessage as system_prompt correctly.""" + tool_call_id = "call-1" + ai_message = AIMessage( + content="", + tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], + ) + tool_message = ToolMessage(content="12345", tool_call_id=tool_call_id) + + system_prompt = SystemMessage(content="You are a helpful assistant.") + state, request = 
_make_state_and_request([ai_message, tool_message], system_prompt=None) + # Manually set SystemMessage as system_prompt + request.system_prompt = system_prompt + + middleware = ContextEditingMiddleware( + edits=[ClearToolUsesEdit(trigger=50)], + token_count_method="model", + ) + + def mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + # Call wrap_model_call - should not fail with SystemMessage + middleware.wrap_model_call(request, mock_handler) + + # Request should have processed without errors + assert request.system_prompt == system_prompt + assert isinstance(request.system_prompt, SystemMessage) + + +def test_does_not_double_wrap_system_message() -> None: + """Test that middleware doesn't wrap SystemMessage in another SystemMessage.""" + tool_call_id = "call-1" + ai_message = AIMessage( + content="", + tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], + ) + tool_message = ToolMessage(content="x" * 100, tool_call_id=tool_call_id) + + system_prompt = SystemMessage(content="Original system prompt") + state, request = _make_state_and_request([ai_message, tool_message], system_prompt=None) + request.system_prompt = system_prompt + + middleware = ContextEditingMiddleware( + edits=[ClearToolUsesEdit(trigger=50)], + token_count_method="model", + ) + + def mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + middleware.wrap_model_call(request, mock_handler) + + # System prompt should still be the same SystemMessage, not wrapped + assert request.system_prompt == system_prompt + assert isinstance(request.system_prompt, SystemMessage) + assert request.system_prompt.content == "Original system prompt" + + +async def test_handles_system_message_prompt_async() -> None: + """Test async version - middleware handles SystemMessage as system_prompt correctly.""" + tool_call_id = "call-1" + ai_message = AIMessage( + content="", + tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], + ) + tool_message = ToolMessage(content="12345", tool_call_id=tool_call_id) + + system_prompt = SystemMessage(content="You are a helpful assistant.") + state, request = _make_state_and_request([ai_message, tool_message], system_prompt=None) + # Manually set SystemMessage as system_prompt + request.system_prompt = system_prompt + + middleware = ContextEditingMiddleware( + edits=[ClearToolUsesEdit(trigger=50)], + token_count_method="model", + ) + + async def mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + # Call awrap_model_call - should not fail with SystemMessage + await middleware.awrap_model_call(request, mock_handler) + + # Request should have processed without errors + assert request.system_prompt == system_prompt + assert isinstance(request.system_prompt, SystemMessage) + + +async def test_does_not_double_wrap_system_message_async() -> None: + """Test async version - middleware doesn't wrap SystemMessage in another SystemMessage.""" + tool_call_id = "call-1" + ai_message = AIMessage( + content="", + tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], + ) + tool_message = ToolMessage(content="x" * 100, tool_call_id=tool_call_id) + + system_prompt = SystemMessage(content="Original system prompt") + state, request = _make_state_and_request([ai_message, tool_message], system_prompt=None) + request.system_prompt = system_prompt + + middleware = ContextEditingMiddleware( + edits=[ClearToolUsesEdit(trigger=50)], + token_count_method="model", + ) + + async def 
mock_handler(req: ModelRequest) -> AIMessage: + return AIMessage(content="mock response") + + await middleware.awrap_model_call(request, mock_handler) + + # System prompt should still be the same SystemMessage, not wrapped + assert request.system_prompt == system_prompt + assert isinstance(request.system_prompt, SystemMessage) + assert request.system_prompt.content == "Original system prompt" diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_create_agent_tool_validation.py b/libs/langchain_v1/tests/unit_tests/agents/test_create_agent_tool_validation.py new file mode 100644 index 00000000000..537dd9e9544 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_create_agent_tool_validation.py @@ -0,0 +1,370 @@ +import sys +import pytest +from typing import Annotated + +from langchain.agents import AgentState, create_agent +from langchain.tools import InjectedState, tool as dec_tool +from .model import FakeToolCallingModel +from langchain_core.messages import HumanMessage +from langgraph.prebuilt import InjectedStore, ToolRuntime +from langgraph.store.base import BaseStore +from langgraph.store.memory import InMemoryStore + + +@pytest.mark.skipif( + sys.version_info >= (3, 14), reason="Pydantic model rebuild issue in Python 3.14" +) +def test_tool_invocation_error_excludes_injected_state() -> None: + """Test that tool invocation errors only include LLM-controllable arguments. + When a tool has InjectedState parameters and the LLM makes an incorrect + invocation (e.g., missing required arguments), the error message should only + contain the arguments from the tool call that the LLM controls. This ensures + the LLM receives relevant context to correct its mistakes, without being + distracted by system-injected parameters it has no control over. + This test uses create_agent to ensure the behavior works in a full agent context. 
+ """ + + # Define a custom state schema with injected data + class TestState(AgentState): + secret_data: str # Example of state data not controlled by LLM + + @dec_tool + def tool_with_injected_state( + some_val: int, + state: Annotated[TestState, InjectedState], + ) -> str: + """Tool that uses injected state.""" + return f"some_val: {some_val}" + + # Create a fake model that makes an incorrect tool call (missing 'some_val') + # Then returns no tool calls on the second iteration to end the loop + model = FakeToolCallingModel( + tool_calls=[ + [ + { + "name": "tool_with_injected_state", + "args": {"wrong_arg": "value"}, # Missing required 'some_val' + "id": "call_1", + } + ], + [], # No tool calls on second iteration to end the loop + ] + ) + + # Create an agent with the tool and custom state schema + agent = create_agent( + model=model, + tools=[tool_with_injected_state], + state_schema=TestState, + ) + + # Invoke the agent with injected state data + result = agent.invoke( + { + "messages": [HumanMessage("Test message")], + "secret_data": "sensitive_secret_123", + } + ) + + # Find the tool error message + tool_messages = [m for m in result["messages"] if m.type == "tool"] + assert len(tool_messages) == 1 + tool_message = tool_messages[0] + assert tool_message.status == "error" + + # The error message should contain only the LLM-provided args (wrong_arg) + # and NOT the system-injected state (secret_data) + assert "{'wrong_arg': 'value'}" in tool_message.content + assert "secret_data" not in tool_message.content + assert "sensitive_secret_123" not in tool_message.content + + +@pytest.mark.skipif( + sys.version_info >= (3, 14), reason="Pydantic model rebuild issue in Python 3.14" +) +async def test_tool_invocation_error_excludes_injected_state_async() -> None: + """Test that async tool invocation errors only include LLM-controllable arguments. + This test verifies that the async execution path (_execute_tool_async and _arun_one) + properly filters validation errors to exclude system-injected arguments, ensuring + the LLM receives only relevant context for correction. 
+ """ + + # Define a custom state schema + class TestState(AgentState): + internal_data: str + + @dec_tool + async def async_tool_with_injected_state( + query: str, + max_results: int, + state: Annotated[TestState, InjectedState], + ) -> str: + """Async tool that uses injected state.""" + return f"query: {query}, max_results: {max_results}" + + # Create a fake model that makes an incorrect tool call + # - query has wrong type (int instead of str) + # - max_results is missing + model = FakeToolCallingModel( + tool_calls=[ + [ + { + "name": "async_tool_with_injected_state", + "args": {"query": 999}, # Wrong type, missing max_results + "id": "call_async_1", + } + ], + [], # End the loop + ] + ) + + # Create an agent with the async tool + agent = create_agent( + model=model, + tools=[async_tool_with_injected_state], + state_schema=TestState, + ) + + # Invoke with state data + result = await agent.ainvoke( + { + "messages": [HumanMessage("Test async")], + "internal_data": "secret_internal_value_xyz", + } + ) + + # Find the tool error message + tool_messages = [m for m in result["messages"] if m.type == "tool"] + assert len(tool_messages) == 1 + tool_message = tool_messages[0] + assert tool_message.status == "error" + + # Verify error mentions LLM-controlled parameters only + content = tool_message.content + assert "query" in content.lower(), "Error should mention 'query' (LLM-controlled)" + assert "max_results" in content.lower(), "Error should mention 'max_results' (LLM-controlled)" + + # Verify system-injected state does not appear in the validation errors + # This keeps the error focused on what the LLM can actually fix + assert "internal_data" not in content, ( + "Error should NOT mention 'internal_data' (system-injected field)" + ) + assert "secret_internal_value" not in content, ( + "Error should NOT contain system-injected state values" + ) + + # Verify only LLM-controlled parameters are in the error list + # Should see "query" and "max_results" errors, but not "state" + lines = content.split("\n") + error_lines = [line.strip() for line in lines if line.strip()] + # Find lines that look like field names (single words at start of line) + field_errors = [ + line + for line in error_lines + if line + and not line.startswith("input") + and not line.startswith("field") + and not line.startswith("error") + and not line.startswith("please") + and len(line.split()) <= 2 + ] + # Verify system-injected 'state' is not in the field error list + assert not any("state" == field.lower() for field in field_errors), ( + "The field 'state' (system-injected) should not appear in validation errors" + ) + + +@pytest.mark.skipif( + sys.version_info >= (3, 14), reason="Pydantic model rebuild issue in Python 3.14" +) +async def test_create_agent_error_content_with_multiple_params() -> None: + """Test that error messages only include LLM-controlled parameter errors. + Uses create_agent to verify that when a tool with both LLM-controlled + and system-injected parameters receives invalid arguments, the error message: + 1. Contains details about LLM-controlled parameter errors (query, limit) + 2. Does NOT contain system-injected parameter names (state, store, runtime) + 3. Does NOT contain values from system-injected parameters + 4. Properly formats the validation errors for LLM correction + This ensures the LLM receives focused, actionable feedback. 
+ """ + + class TestState(AgentState): + user_id: str + api_key: str + session_data: dict + + @dec_tool + def complex_tool( + query: str, + limit: int, + state: Annotated[TestState, InjectedState], + store: Annotated[BaseStore, InjectedStore()], + runtime: ToolRuntime, + ) -> str: + """A complex tool with multiple injected and non-injected parameters. + Args: + query: The search query string. + limit: Maximum number of results to return. + state: The graph state (injected). + store: The persistent store (injected). + runtime: The tool runtime context (injected). + """ + # Access injected params to verify they work in normal execution + user = state.get("user_id", "unknown") + return f"Results for '{query}' (limit={limit}, user={user})" + + # Create a model that makes an incorrect tool call with multiple errors: + # - query is wrong type (int instead of str) + # - limit is missing + # Then returns no tool calls to end the loop + model = FakeToolCallingModel( + tool_calls=[ + [ + { + "name": "complex_tool", + "args": { + "query": 12345, # Wrong type - should be str + # "limit" is missing - required field + }, + "id": "call_complex_1", + } + ], + [], # No tool calls on second iteration to end the loop + ] + ) + + # Create an agent with the complex tool and custom state + # Need to provide a store since the tool uses InjectedStore + agent = create_agent( + model=model, + tools=[complex_tool], + state_schema=TestState, + store=InMemoryStore(), + ) + + # Invoke with sensitive data in state + result = agent.invoke( + { + "messages": [HumanMessage("Search for something")], + "user_id": "user_12345", + "api_key": "sk-secret-key-abc123xyz", + "session_data": {"token": "secret_session_token"}, + } + ) + + # Find the tool error message + tool_messages = [m for m in result["messages"] if m.type == "tool"] + assert len(tool_messages) == 1 + tool_message = tool_messages[0] + assert tool_message.status == "error" + assert tool_message.tool_call_id == "call_complex_1" + + content = tool_message.content + + # Verify error mentions LLM-controlled parameter issues + assert "query" in content.lower(), "Error should mention 'query' (LLM-controlled)" + assert "limit" in content.lower(), "Error should mention 'limit' (LLM-controlled)" + + # Should indicate validation errors occurred + assert "validation error" in content.lower() or "error" in content.lower(), ( + "Error should indicate validation occurred" + ) + + # Verify NO system-injected parameter names appear in error + # These are not controlled by the LLM and should be excluded + assert "state" not in content.lower(), "Error should NOT mention 'state' (system-injected)" + assert "store" not in content.lower(), "Error should NOT mention 'store' (system-injected)" + assert "runtime" not in content.lower(), "Error should NOT mention 'runtime' (system-injected)" + + # Verify NO values from system-injected parameters appear in error + # The LLM doesn't control these, so they shouldn't distract from the actual issues + assert "user_12345" not in content, "Error should NOT contain user_id value (from state)" + assert "sk-secret-key" not in content, "Error should NOT contain api_key value (from state)" + assert "secret_session_token" not in content, ( + "Error should NOT contain session_data value (from state)" + ) + + # Verify the LLM's original tool call args are present + # The error should show what the LLM actually provided to help it correct the mistake + assert "12345" in content, "Error should show the invalid query value provided by LLM (12345)" + + # 
Check error is well-formatted + assert "complex_tool" in content, "Error should mention the tool name" + + +@pytest.mark.skipif( + sys.version_info >= (3, 14), reason="Pydantic model rebuild issue in Python 3.14" +) +async def test_create_agent_error_only_model_controllable_params() -> None: + """Test that errors only include LLM-controllable parameter issues. + Focused test ensuring that validation errors for LLM-controlled parameters + are clearly reported, while system-injected parameters remain completely + absent from error messages. This provides focused feedback to the LLM. + """ + + class StateWithSecrets(AgentState): + password: str # Example of data not controlled by LLM + + @dec_tool + def secure_tool( + username: str, + email: str, + state: Annotated[StateWithSecrets, InjectedState], + ) -> str: + """Tool that validates user credentials. + Args: + username: The username (3-20 chars). + email: The email address. + state: State with password (system-injected). + """ + return f"Validated {username} with email {email}" + + # LLM provides invalid username (too short) and invalid email + model = FakeToolCallingModel( + tool_calls=[ + [ + { + "name": "secure_tool", + "args": { + "username": "ab", # Too short (needs 3-20) + "email": "not-an-email", # Invalid format + }, + "id": "call_secure_1", + } + ], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[secure_tool], + state_schema=StateWithSecrets, + ) + + result = agent.invoke( + { + "messages": [HumanMessage("Create account")], + "password": "super_secret_password_12345", + } + ) + + tool_messages = [m for m in result["messages"] if m.type == "tool"] + assert len(tool_messages) == 1 + content = tool_messages[0].content + + # The error should mention LLM-controlled parameters + # Note: Pydantic's default validation may or may not catch format issues, + # but the parameters themselves should be present in error messages + assert "username" in content.lower() or "email" in content.lower(), ( + "Error should mention at least one LLM-controlled parameter" + ) + + # Password is system-injected and should not appear + # The LLM doesn't control it, so it shouldn't distract from the actual errors + assert "password" not in content.lower(), ( + "Error should NOT mention 'password' (system-injected parameter)" + ) + assert "super_secret_password" not in content, ( + "Error should NOT contain password value (from system-injected state)" + ) diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_handler_composition.py b/libs/langchain_v1/tests/unit_tests/agents/test_handler_composition.py index d8835a90be5..efecfcd2023 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_handler_composition.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_handler_composition.py @@ -4,14 +4,14 @@ import pytest from langchain_core.messages import AIMessage from langchain.agents.factory import _chain_model_call_handlers -from langchain.agents.middleware.types import ModelRequest +from langchain.agents.middleware.types import ModelRequest, ModelResponse from typing import cast from langgraph.runtime import Runtime def create_test_request(**kwargs): - """Helper to create a ModelRequest with sensible defaults.""" + """Helper to create a `ModelRequest` with sensible defaults.""" defaults = { "messages": [], @@ -27,8 +27,17 @@ def create_test_request(**kwargs): return ModelRequest(**defaults) +def create_mock_base_handler(content="test"): + """Helper to create a base handler that returns `ModelResponse`.""" + + def mock_base_handler(req): 
+ return ModelResponse(result=[AIMessage(content=content)], structured_response=None) + + return mock_base_handler + + class TestChainModelCallHandlers: - """Test the _chain_model_call_handlers composition function.""" + """Test the `_chain_model_call_handlers` composition function.""" def test_empty_handlers_returns_none(self) -> None: """Test that empty handlers list returns None.""" @@ -36,13 +45,15 @@ class TestChainModelCallHandlers: assert result is None def test_single_handler_returns_unchanged(self) -> None: - """Test that single handler is returned without composition.""" + """Test that single handler is wrapped to normalize output.""" - def handler(request, state, runtime, base_handler): + def handler(request, base_handler): return base_handler(request) result = _chain_model_call_handlers([handler]) - assert result is handler + # Result is wrapped to normalize, so it won't be identical + assert result is not None + assert callable(result) def test_two_handlers_basic_composition(self) -> None: """Test basic composition of two handlers.""" @@ -63,11 +74,7 @@ class TestChainModelCallHandlers: composed = _chain_model_call_handlers([outer, inner]) assert composed is not None - # Execute the composed handler - def mock_base_handler(req): - return AIMessage(content="test") - - result = composed(create_test_request(), mock_base_handler) + result = composed(create_test_request(), create_mock_base_handler()) assert execution_order == [ "outer-before", @@ -75,7 +82,9 @@ class TestChainModelCallHandlers: "inner-after", "outer-after", ] - assert result.content == "test" + # Result is now ModelResponse + assert isinstance(result, ModelResponse) + assert result.result[0].content == "test" def test_three_handlers_composition(self) -> None: """Test composition of three handlers.""" @@ -102,10 +111,7 @@ class TestChainModelCallHandlers: composed = _chain_model_call_handlers([first, second, third]) assert composed is not None - def mock_base_handler(req): - return AIMessage(content="test") - - result = composed(create_test_request(), mock_base_handler) + result = composed(create_test_request(), create_mock_base_handler()) # First wraps second wraps third assert execution_order == [ @@ -116,7 +122,8 @@ class TestChainModelCallHandlers: "second-after", "first-after", ] - assert result.content == "test" + assert isinstance(result, ModelResponse) + assert result.result[0].content == "test" def test_inner_handler_retry(self) -> None: """Test inner handler retrying before outer sees response.""" @@ -144,12 +151,13 @@ class TestChainModelCallHandlers: call_count["value"] += 1 if call_count["value"] < 3: raise ValueError("fail") - return AIMessage(content="success") + return ModelResponse(result=[AIMessage(content="success")], structured_response=None) result = composed(create_test_request(), mock_base_handler) assert inner_attempts == [0, 1, 2] - assert result.content == "success" + assert isinstance(result, ModelResponse) + assert result.result[0].content == "success" def test_error_to_success_conversion(self) -> None: """Test handler converting error to success response.""" @@ -158,6 +166,7 @@ class TestChainModelCallHandlers: try: return handler(request) except Exception: + # Middleware can return AIMessage - it will be normalized to ModelResponse return AIMessage(content="Fallback response") def inner_passthrough(request, handler): @@ -171,7 +180,10 @@ class TestChainModelCallHandlers: result = composed(create_test_request(), mock_base_handler) - assert result.content == "Fallback response" + # 
AIMessage was automatically converted to ModelResponse + assert isinstance(result, ModelResponse) + assert result.result[0].content == "Fallback response" + assert result.structured_response is None def test_request_modification(self) -> None: """Test handlers modifying the request.""" @@ -190,13 +202,11 @@ class TestChainModelCallHandlers: composed = _chain_model_call_handlers([outer_add_context, inner_track_request]) assert composed is not None - def mock_base_handler(req): - return AIMessage(content="response") - - result = composed(create_test_request(), mock_base_handler) + result = composed(create_test_request(), create_mock_base_handler(content="response")) assert requests_seen == ["Added by outer"] - assert result.content == "response" + assert isinstance(result, ModelResponse) + assert result.result[0].content == "response" def test_composition_preserves_state_and_runtime(self) -> None: """Test that state and runtime are passed through composition.""" @@ -219,19 +229,17 @@ class TestChainModelCallHandlers: test_state = {"test": "state"} test_runtime = {"test": "runtime"} - def mock_base_handler(req): - return AIMessage(content="test") - # Create request with state and runtime test_request = create_test_request() test_request.state = test_state test_request.runtime = test_runtime - result = composed(test_request, mock_base_handler) + result = composed(test_request, create_mock_base_handler()) # Both handlers should see same state and runtime assert state_values == [("outer", test_state), ("inner", test_state)] assert runtime_values == [("outer", test_runtime), ("inner", test_runtime)] - assert result.content == "test" + assert isinstance(result, ModelResponse) + assert result.result[0].content == "test" def test_multiple_yields_in_retry_loop(self) -> None: """Test handler that retries multiple times.""" @@ -257,11 +265,12 @@ class TestChainModelCallHandlers: attempt["value"] += 1 if attempt["value"] == 1: raise ValueError("fail") - return AIMessage(content="ok") + return ModelResponse(result=[AIMessage(content="ok")], structured_response=None) result = composed(create_test_request(), mock_base_handler) # Outer called once, inner retried so base handler called twice assert call_count["value"] == 1 assert attempt["value"] == 2 - assert result.content == "ok" + assert isinstance(result, ModelResponse) + assert result.result[0].content == "ok" diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_injected_runtime_create_agent.py b/libs/langchain_v1/tests/unit_tests/agents/test_injected_runtime_create_agent.py new file mode 100644 index 00000000000..4b75e45f925 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_injected_runtime_create_agent.py @@ -0,0 +1,591 @@ +"""Test ToolRuntime injection with create_agent. + +This module tests the injected runtime functionality when using tools +with the create_agent factory. The ToolRuntime provides tools access to: +- state: Current graph state +- tool_call_id: ID of the current tool call +- config: RunnableConfig for the execution +- context: Runtime context from LangGraph +- store: BaseStore for persistent storage +- stream_writer: For streaming custom output + +These tests verify that runtime injection works correctly across both +sync and async execution paths, with middleware, and in various agent +configurations. 
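+
+A minimal sketch of the injection pattern exercised below (the tool name `echo_tool`
+is illustrative only):
+
+    @tool
+    def echo_tool(x: int, runtime: ToolRuntime) -> str:
+        '''Echo the input and report on the injected runtime.'''
+        messages = runtime.state.get("messages", [])
+        return f"x={x}, call={runtime.tool_call_id}, history={len(messages)}"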
+""" + +from __future__ import annotations + +from typing import Any + +import pytest +from langchain_core.messages import AIMessage, HumanMessage, ToolMessage +from langchain_core.tools import tool +from langgraph.store.memory import InMemoryStore + +from langchain.agents import create_agent +from langchain.agents.middleware.types import AgentState +from langchain.tools import ToolRuntime + +from .model import FakeToolCallingModel + + +def test_tool_runtime_basic_injection() -> None: + """Test basic ToolRuntime injection in tools with create_agent.""" + # Track what was injected + injected_data = {} + + @tool + def runtime_tool(x: int, runtime: ToolRuntime) -> str: + """Tool that accesses runtime context.""" + injected_data["state"] = runtime.state + injected_data["tool_call_id"] = runtime.tool_call_id + injected_data["config"] = runtime.config + injected_data["context"] = runtime.context + injected_data["store"] = runtime.store + injected_data["stream_writer"] = runtime.stream_writer + return f"Processed {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 42}, "id": "call_123", "name": "runtime_tool"}], + [], + ] + ), + tools=[runtime_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Test")]}) + + # Verify tool executed + assert len(result["messages"]) == 4 + tool_message = result["messages"][2] + assert isinstance(tool_message, ToolMessage) + assert tool_message.content == "Processed 42" + assert tool_message.tool_call_id == "call_123" + + # Verify runtime was injected + assert injected_data["state"] is not None + assert "messages" in injected_data["state"] + assert injected_data["tool_call_id"] == "call_123" + assert injected_data["config"] is not None + # Context, store, stream_writer may be None depending on graph setup + assert "context" in injected_data + assert "store" in injected_data + assert "stream_writer" in injected_data + + +async def test_tool_runtime_async_injection() -> None: + """Test ToolRuntime injection works with async tools.""" + injected_data = {} + + @tool + async def async_runtime_tool(x: int, runtime: ToolRuntime) -> str: + """Async tool that accesses runtime context.""" + injected_data["state"] = runtime.state + injected_data["tool_call_id"] = runtime.tool_call_id + injected_data["config"] = runtime.config + return f"Async processed {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 99}, "id": "async_call_456", "name": "async_runtime_tool"}], + [], + ] + ), + tools=[async_runtime_tool], + system_prompt="You are a helpful assistant.", + ) + + result = await agent.ainvoke({"messages": [HumanMessage("Test async")]}) + + # Verify tool executed + assert len(result["messages"]) == 4 + tool_message = result["messages"][2] + assert isinstance(tool_message, ToolMessage) + assert tool_message.content == "Async processed 99" + assert tool_message.tool_call_id == "async_call_456" + + # Verify runtime was injected + assert injected_data["state"] is not None + assert "messages" in injected_data["state"] + assert injected_data["tool_call_id"] == "async_call_456" + assert injected_data["config"] is not None + + +def test_tool_runtime_state_access() -> None: + """Test that tools can access and use state via ToolRuntime.""" + + @tool + def state_aware_tool(query: str, runtime: ToolRuntime) -> str: + """Tool that uses state to provide context-aware responses.""" + messages = runtime.state.get("messages", []) + msg_count = 
len(messages) + return f"Query: {query}, Message count: {msg_count}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"query": "test"}, "id": "state_call", "name": "state_aware_tool"}], + [], + ] + ), + tools=[state_aware_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Hello"), HumanMessage("World")]}) + + # Check that tool accessed state correctly + tool_message = result["messages"][3] + assert isinstance(tool_message, ToolMessage) + # Should have original 2 HumanMessages + 1 AIMessage before tool execution + assert "Message count: 3" in tool_message.content + + +def test_tool_runtime_with_store() -> None: + """Test ToolRuntime provides access to store.""" + # Note: create_agent doesn't currently expose a store parameter, + # so runtime.store will be None in this test. + # This test demonstrates the runtime injection works correctly. + + @tool + def store_tool(key: str, value: str, runtime: ToolRuntime) -> str: + """Tool that uses store.""" + if runtime.store is None: + return f"No store (key={key}, value={value})" + runtime.store.put(("test",), key, {"data": value}) + return f"Stored {key}={value}" + + @tool + def check_runtime_tool(runtime: ToolRuntime) -> str: + """Tool that checks runtime availability.""" + has_store = runtime.store is not None + has_context = runtime.context is not None + return f"Runtime: store={has_store}, context={has_context}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"key": "foo", "value": "bar"}, "id": "call_1", "name": "store_tool"}], + [{"args": {}, "id": "call_2", "name": "check_runtime_tool"}], + [], + ] + ), + tools=[store_tool, check_runtime_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Test store")]}) + + # Find the tool messages + tool_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + assert len(tool_messages) == 2 + + # First tool indicates no store is available (expected since create_agent doesn't expose store) + assert "No store" in tool_messages[0].content + + # Second tool confirms runtime was injected + assert "Runtime:" in tool_messages[1].content + + +def test_tool_runtime_with_multiple_tools() -> None: + """Test multiple tools can all access ToolRuntime.""" + call_log = [] + + @tool + def tool_a(x: int, runtime: ToolRuntime) -> str: + """First tool.""" + call_log.append(("tool_a", runtime.tool_call_id, x)) + return f"A: {x}" + + @tool + def tool_b(y: str, runtime: ToolRuntime) -> str: + """Second tool.""" + call_log.append(("tool_b", runtime.tool_call_id, y)) + return f"B: {y}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [ + {"args": {"x": 1}, "id": "call_a", "name": "tool_a"}, + {"args": {"y": "test"}, "id": "call_b", "name": "tool_b"}, + ], + [], + ] + ), + tools=[tool_a, tool_b], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Use both tools")]}) + + # Verify both tools were called with correct runtime + assert len(call_log) == 2 + # Tools may execute in parallel, so check both calls are present + call_ids = {(name, call_id) for name, call_id, _ in call_log} + assert ("tool_a", "call_a") in call_ids + assert ("tool_b", "call_b") in call_ids + + # Verify tool messages + tool_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + assert len(tool_messages) == 2 + contents = {msg.content for msg in 
tool_messages} + assert "A: 1" in contents + assert "B: test" in contents + + +def test_tool_runtime_config_access() -> None: + """Test tools can access config through ToolRuntime.""" + config_data = {} + + @tool + def config_tool(x: int, runtime: ToolRuntime) -> str: + """Tool that accesses config.""" + config_data["config_exists"] = runtime.config is not None + config_data["has_configurable"] = ( + "configurable" in runtime.config if runtime.config else False + ) + # Config may have run_id or other fields depending on execution context + if runtime.config: + config_data["config_keys"] = list(runtime.config.keys()) + return f"Config accessed for {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 5}, "id": "config_call", "name": "config_tool"}], + [], + ] + ), + tools=[config_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Test config")]}) + + # Verify config was accessible + assert config_data["config_exists"] is True + assert "config_keys" in config_data + + # Verify tool executed + tool_message = result["messages"][2] + assert isinstance(tool_message, ToolMessage) + assert tool_message.content == "Config accessed for 5" + + +def test_tool_runtime_with_custom_state() -> None: + """Test ToolRuntime works with custom state schemas.""" + from typing_extensions import Annotated, TypedDict + + from langchain.agents.middleware.types import AgentMiddleware + + class CustomState(AgentState): + custom_field: str + + runtime_state = {} + + @tool + def custom_state_tool(x: int, runtime: ToolRuntime) -> str: + """Tool that accesses custom state.""" + runtime_state["custom_field"] = runtime.state.get("custom_field", "not found") + return f"Custom: {x}" + + class CustomMiddleware(AgentMiddleware): + state_schema = CustomState + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 10}, "id": "custom_call", "name": "custom_state_tool"}], + [], + ] + ), + tools=[custom_state_tool], + system_prompt="You are a helpful assistant.", + middleware=[CustomMiddleware()], + ) + + result = agent.invoke( + {"messages": [HumanMessage("Test custom state")], "custom_field": "custom_value"} + ) + + # Verify custom field was accessible + assert runtime_state["custom_field"] == "custom_value" + + # Verify tool executed + tool_message = result["messages"][2] + assert isinstance(tool_message, ToolMessage) + assert tool_message.content == "Custom: 10" + + +def test_tool_runtime_no_runtime_parameter() -> None: + """Test that tools without runtime parameter work normally.""" + + @tool + def regular_tool(x: int) -> str: + """Regular tool without runtime.""" + return f"Regular: {x}" + + @tool + def runtime_tool(y: int, runtime: ToolRuntime) -> str: + """Tool with runtime.""" + return f"Runtime: {y}, call_id: {runtime.tool_call_id}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [ + {"args": {"x": 1}, "id": "regular_call", "name": "regular_tool"}, + {"args": {"y": 2}, "id": "runtime_call", "name": "runtime_tool"}, + ], + [], + ] + ), + tools=[regular_tool, runtime_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Test mixed tools")]}) + + # Verify both tools executed correctly + tool_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + assert len(tool_messages) == 2 + assert tool_messages[0].content == "Regular: 1" + assert "Runtime: 2, call_id: runtime_call" in 
tool_messages[1].content + + +async def test_tool_runtime_parallel_execution() -> None: + """Test ToolRuntime injection works with parallel tool execution.""" + execution_log = [] + + @tool + async def parallel_tool_1(x: int, runtime: ToolRuntime) -> str: + """First parallel tool.""" + execution_log.append(("tool_1", runtime.tool_call_id, x)) + return f"Tool1: {x}" + + @tool + async def parallel_tool_2(y: int, runtime: ToolRuntime) -> str: + """Second parallel tool.""" + execution_log.append(("tool_2", runtime.tool_call_id, y)) + return f"Tool2: {y}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [ + {"args": {"x": 10}, "id": "parallel_1", "name": "parallel_tool_1"}, + {"args": {"y": 20}, "id": "parallel_2", "name": "parallel_tool_2"}, + ], + [], + ] + ), + tools=[parallel_tool_1, parallel_tool_2], + system_prompt="You are a helpful assistant.", + ) + + result = await agent.ainvoke({"messages": [HumanMessage("Run parallel")]}) + + # Verify both tools executed + assert len(execution_log) == 2 + + # Find the tool messages (order may vary due to parallel execution) + tool_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + assert len(tool_messages) == 2 + + contents = {msg.content for msg in tool_messages} + assert "Tool1: 10" in contents + assert "Tool2: 20" in contents + + call_ids = {msg.tool_call_id for msg in tool_messages} + assert "parallel_1" in call_ids + assert "parallel_2" in call_ids + + +def test_tool_runtime_error_handling() -> None: + """Test error handling with ToolRuntime injection.""" + + @tool + def error_tool(x: int, runtime: ToolRuntime) -> str: + """Tool that may error.""" + # Access runtime to ensure it's injected even during errors + _ = runtime.tool_call_id + if x == 0: + msg = "Cannot process zero" + raise ValueError(msg) + return f"Processed: {x}" + + # create_agent uses default error handling which doesn't catch ValueError + # So we need to handle this differently + @tool + def safe_tool(x: int, runtime: ToolRuntime) -> str: + """Tool that handles errors safely.""" + try: + if x == 0: + return "Error: Cannot process zero" + return f"Processed: {x}" + except Exception as e: + return f"Error: {e}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 0}, "id": "error_call", "name": "safe_tool"}], + [{"args": {"x": 5}, "id": "success_call", "name": "safe_tool"}], + [], + ] + ), + tools=[safe_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Test error handling")]}) + + # Both tool calls should complete + tool_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + assert len(tool_messages) == 2 + + # First call returned error message + assert "Error:" in tool_messages[0].content or "Cannot process zero" in tool_messages[0].content + + # Second call succeeded + assert "Processed: 5" in tool_messages[1].content + + +def test_tool_runtime_with_middleware() -> None: + """Test ToolRuntime injection works with agent middleware.""" + from typing import Any + + from langchain.agents.middleware.types import AgentMiddleware + + middleware_calls = [] + runtime_calls = [] + + class TestMiddleware(AgentMiddleware): + def before_model(self, state, runtime) -> dict[str, Any]: + middleware_calls.append("before_model") + return {} + + def after_model(self, state, runtime) -> dict[str, Any]: + middleware_calls.append("after_model") + return {} + + @tool + def middleware_tool(x: int, runtime: ToolRuntime) -> 
str: + """Tool with runtime in middleware agent.""" + runtime_calls.append(("middleware_tool", runtime.tool_call_id)) + return f"Middleware result: {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 7}, "id": "mw_call", "name": "middleware_tool"}], + [], + ] + ), + tools=[middleware_tool], + system_prompt="You are a helpful assistant.", + middleware=[TestMiddleware()], + ) + + result = agent.invoke({"messages": [HumanMessage("Test with middleware")]}) + + # Verify middleware ran + assert "before_model" in middleware_calls + assert "after_model" in middleware_calls + + # Verify tool with runtime executed + assert len(runtime_calls) == 1 + assert runtime_calls[0] == ("middleware_tool", "mw_call") + + # Verify result + tool_message = result["messages"][2] + assert isinstance(tool_message, ToolMessage) + assert tool_message.content == "Middleware result: 7" + + +def test_tool_runtime_type_hints() -> None: + """Test that ToolRuntime provides access to state fields.""" + typed_runtime = {} + + # Use ToolRuntime without generic type hints to avoid forward reference issues + @tool + def typed_runtime_tool(x: int, runtime: ToolRuntime) -> str: + """Tool with runtime access.""" + # Access state dict - verify we can access standard state fields + if isinstance(runtime.state, dict): + # Count messages in state + typed_runtime["message_count"] = len(runtime.state.get("messages", [])) + else: + typed_runtime["message_count"] = len(getattr(runtime.state, "messages", [])) + return f"Typed: {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 3}, "id": "typed_call", "name": "typed_runtime_tool"}], + [], + ] + ), + tools=[typed_runtime_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Test")]}) + + # Verify typed runtime worked - should see 2 messages (HumanMessage + AIMessage) before tool executes + assert typed_runtime["message_count"] == 2 + + tool_message = result["messages"][2] + assert isinstance(tool_message, ToolMessage) + assert tool_message.content == "Typed: 3" + + +def test_tool_runtime_name_based_injection() -> None: + """Test that parameter named 'runtime' gets injected without type annotation.""" + injected_data = {} + + @tool + def name_based_tool(x: int, runtime: Any) -> str: + """Tool with 'runtime' parameter without ToolRuntime type annotation.""" + # Even though type is Any, runtime should still be injected as ToolRuntime + injected_data["is_tool_runtime"] = isinstance(runtime, ToolRuntime) + injected_data["has_state"] = hasattr(runtime, "state") + injected_data["has_tool_call_id"] = hasattr(runtime, "tool_call_id") + if hasattr(runtime, "tool_call_id"): + injected_data["tool_call_id"] = runtime.tool_call_id + if hasattr(runtime, "state"): + injected_data["state"] = runtime.state + return f"Processed {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {"x": 42}, "id": "name_call_123", "name": "name_based_tool"}], + [], + ] + ), + tools=[name_based_tool], + system_prompt="You are a helpful assistant.", + ) + + result = agent.invoke({"messages": [HumanMessage("Test")]}) + + # Verify tool executed + assert len(result["messages"]) == 4 + tool_message = result["messages"][2] + assert isinstance(tool_message, ToolMessage) + assert tool_message.content == "Processed 42" + + # Verify runtime was injected based on parameter name + assert injected_data["is_tool_runtime"] is True + assert injected_data["has_state"] is 
True + assert injected_data["has_tool_call_id"] is True + assert injected_data["tool_call_id"] == "name_call_123" + assert injected_data["state"] is not None + assert "messages" in injected_data["state"] diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_middleware_agent.py b/libs/langchain_v1/tests/unit_tests/agents/test_middleware_agent.py index df696090b1d..02fa96e6b65 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_middleware_agent.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_middleware_agent.py @@ -4,6 +4,7 @@ from types import ModuleType from typing import Any from unittest.mock import patch +import sys import pytest from langchain_core.language_models import BaseChatModel from langchain_core.language_models.chat_models import BaseChatModel @@ -29,11 +30,11 @@ from syrupy.assertion import SnapshotAssertion from typing_extensions import Annotated from langchain.agents.middleware.human_in_the_loop import ( - ActionRequest, + Action, HumanInTheLoopMiddleware, ) -from langchain.agents.middleware.planning import ( - PlanningMiddleware, +from langchain.agents.middleware.todo import ( + TodoListMiddleware, PlanningState, WRITE_TODOS_SYSTEM_PROMPT, write_todos, @@ -44,7 +45,6 @@ from langchain.agents.middleware.model_call_limit import ( ModelCallLimitExceededError, ) from langchain.agents.middleware.model_fallback import ModelFallbackMiddleware -from langchain.agents.middleware.prompt_caching import AnthropicPromptCachingMiddleware from langchain.agents.middleware.summarization import SummarizationMiddleware from langchain.agents.middleware.types import ( AgentMiddleware, @@ -54,6 +54,8 @@ from langchain.agents.middleware.types import ( OmitFromInput, OmitFromOutput, PrivateStateAttr, + ModelResponse, + ModelCallResult, ) from langchain.agents.factory import create_agent from langchain.agents.structured_output import ToolStrategy @@ -463,14 +465,12 @@ def test_human_in_the_loop_middleware_initialization() -> None: """Test HumanInTheLoopMiddleware initialization.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_accept": True, "allow_edit": True, "allow_respond": True} - }, + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}}, description_prefix="Custom prefix", ) assert middleware.interrupt_on == { - "test_tool": {"allow_accept": True, "allow_edit": True, "allow_respond": True} + "test_tool": {"allowed_decisions": ["approve", "edit", "reject"]} } assert middleware.description_prefix == "Custom prefix" @@ -479,9 +479,7 @@ def test_human_in_the_loop_middleware_no_interrupts_needed() -> None: """Test HumanInTheLoopMiddleware when no interrupts are needed.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_respond": True, "allow_edit": True, "allow_accept": True} - } + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}} ) # Test with no messages @@ -508,9 +506,7 @@ def test_human_in_the_loop_middleware_single_tool_accept() -> None: """Test HumanInTheLoopMiddleware with single tool accept response.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_respond": True, "allow_edit": True, "allow_accept": True} - } + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}} ) ai_message = AIMessage( @@ -520,7 +516,7 @@ def test_human_in_the_loop_middleware_single_tool_accept() -> None: state = {"messages": [HumanMessage(content="Hello"), ai_message]} def mock_accept(requests): - return [{"type": 
"accept", "args": None}] + return {"decisions": [{"type": "approve"}]} with patch("langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_accept): result = middleware.after_model(state, None) @@ -543,9 +539,7 @@ def test_human_in_the_loop_middleware_single_tool_accept() -> None: def test_human_in_the_loop_middleware_single_tool_edit() -> None: """Test HumanInTheLoopMiddleware with single tool edit response.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_respond": True, "allow_edit": True, "allow_accept": True} - } + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}} ) ai_message = AIMessage( @@ -555,15 +549,17 @@ def test_human_in_the_loop_middleware_single_tool_edit() -> None: state = {"messages": [HumanMessage(content="Hello"), ai_message]} def mock_edit(requests): - return [ - { - "type": "edit", - "args": ActionRequest( - action="test_tool", - args={"input": "edited"}, - ), - } - ] + return { + "decisions": [ + { + "type": "edit", + "edited_action": Action( + name="test_tool", + args={"input": "edited"}, + ), + } + ] + } with patch("langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_edit): result = middleware.after_model(state, None) @@ -578,9 +574,7 @@ def test_human_in_the_loop_middleware_single_tool_response() -> None: """Test HumanInTheLoopMiddleware with single tool response with custom message.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_respond": True, "allow_edit": True, "allow_accept": True} - } + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}} ) ai_message = AIMessage( @@ -590,7 +584,7 @@ def test_human_in_the_loop_middleware_single_tool_response() -> None: state = {"messages": [HumanMessage(content="Hello"), ai_message]} def mock_response(requests): - return [{"type": "response", "args": "Custom response message"}] + return {"decisions": [{"type": "reject", "message": "Custom response message"}]} with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_response @@ -611,8 +605,8 @@ def test_human_in_the_loop_middleware_multiple_tools_mixed_responses() -> None: middleware = HumanInTheLoopMiddleware( interrupt_on={ - "get_forecast": {"allow_accept": True, "allow_edit": True, "allow_respond": True}, - "get_temperature": {"allow_accept": True, "allow_edit": True, "allow_respond": True}, + "get_forecast": {"allowed_decisions": ["approve", "edit", "reject"]}, + "get_temperature": {"allowed_decisions": ["approve", "edit", "reject"]}, } ) @@ -626,10 +620,12 @@ def test_human_in_the_loop_middleware_multiple_tools_mixed_responses() -> None: state = {"messages": [HumanMessage(content="What's the weather?"), ai_message]} def mock_mixed_responses(requests): - return [ - {"type": "accept", "args": None}, - {"type": "response", "args": "User rejected this tool call"}, - ] + return { + "decisions": [ + {"type": "approve"}, + {"type": "reject", "message": "User rejected this tool call"}, + ] + } with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_mixed_responses @@ -659,8 +655,8 @@ def test_human_in_the_loop_middleware_multiple_tools_edit_responses() -> None: middleware = HumanInTheLoopMiddleware( interrupt_on={ - "get_forecast": {"allow_accept": True, "allow_edit": True, "allow_respond": True}, - "get_temperature": {"allow_accept": True, "allow_edit": True, "allow_respond": True}, + "get_forecast": {"allowed_decisions": ["approve", "edit", 
"reject"]}, + "get_temperature": {"allowed_decisions": ["approve", "edit", "reject"]}, } ) @@ -674,22 +670,24 @@ def test_human_in_the_loop_middleware_multiple_tools_edit_responses() -> None: state = {"messages": [HumanMessage(content="What's the weather?"), ai_message]} def mock_edit_responses(requests): - return [ - { - "type": "edit", - "args": ActionRequest( - action="get_forecast", - args={"location": "New York"}, - ), - }, - { - "type": "edit", - "args": ActionRequest( - action="get_temperature", - args={"location": "New York"}, - ), - }, - ] + return { + "decisions": [ + { + "type": "edit", + "edited_action": Action( + name="get_forecast", + args={"location": "New York"}, + ), + }, + { + "type": "edit", + "edited_action": Action( + name="get_temperature", + args={"location": "New York"}, + ), + }, + ] + } with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_edit_responses @@ -710,9 +708,7 @@ def test_human_in_the_loop_middleware_edit_with_modified_args() -> None: """Test HumanInTheLoopMiddleware with edit action that includes modified args.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_accept": True, "allow_edit": True, "allow_respond": True} - } + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}} ) ai_message = AIMessage( @@ -722,15 +718,17 @@ def test_human_in_the_loop_middleware_edit_with_modified_args() -> None: state = {"messages": [HumanMessage(content="Hello"), ai_message]} def mock_edit_with_args(requests): - return [ - { - "type": "edit", - "args": ActionRequest( - action="test_tool", - args={"input": "modified"}, - ), - } - ] + return { + "decisions": [ + { + "type": "edit", + "edited_action": Action( + name="test_tool", + args={"input": "modified"}, + ), + } + ] + } with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", @@ -750,9 +748,7 @@ def test_human_in_the_loop_middleware_edit_with_modified_args() -> None: def test_human_in_the_loop_middleware_unknown_response_type() -> None: """Test HumanInTheLoopMiddleware with unknown response type.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_accept": True, "allow_edit": True, "allow_respond": True} - } + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}} ) ai_message = AIMessage( @@ -762,12 +758,12 @@ def test_human_in_the_loop_middleware_unknown_response_type() -> None: state = {"messages": [HumanMessage(content="Hello"), ai_message]} def mock_unknown(requests): - return [{"type": "unknown", "args": None}] + return {"decisions": [{"type": "unknown"}]} with patch("langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_unknown): with pytest.raises( ValueError, - match=r"Unexpected human response: {'type': 'unknown', 'args': None}. Response action 'unknown' is not allowed for tool 'test_tool'. Expected one of \['accept', 'edit', 'response'\] based on the tool's configuration.", + match=r"Unexpected human decision: {'type': 'unknown'}. Decision type 'unknown' is not allowed for tool 'test_tool'. 
Expected one of \['approve', 'edit', 'reject'\] based on the tool's configuration.", ): middleware.after_model(state, None) @@ -777,9 +773,7 @@ def test_human_in_the_loop_middleware_disallowed_action() -> None: # edit is not allowed by tool config middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_respond": True, "allow_edit": False, "allow_accept": True} - } + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "reject"]}} ) ai_message = AIMessage( @@ -789,15 +783,17 @@ def test_human_in_the_loop_middleware_disallowed_action() -> None: state = {"messages": [HumanMessage(content="Hello"), ai_message]} def mock_disallowed_action(requests): - return [ - { - "type": "edit", - "args": ActionRequest( - action="test_tool", - args={"input": "modified"}, - ), - } - ] + return { + "decisions": [ + { + "type": "edit", + "edited_action": Action( + name="test_tool", + args={"input": "modified"}, + ), + } + ] + } with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", @@ -805,7 +801,7 @@ def test_human_in_the_loop_middleware_disallowed_action() -> None: ): with pytest.raises( ValueError, - match=r"Unexpected human response: {'type': 'edit', 'args': {'action': 'test_tool', 'args': {'input': 'modified'}}}. Response action 'edit' is not allowed for tool 'test_tool'. Expected one of \['accept', 'response'\] based on the tool's configuration.", + match=r"Unexpected human decision: {'type': 'edit', 'edited_action': {'name': 'test_tool', 'args': {'input': 'modified'}}}. Decision type 'edit' is not allowed for tool 'test_tool'. Expected one of \['approve', 'reject'\] based on the tool's configuration.", ): middleware.after_model(state, None) @@ -814,9 +810,7 @@ def test_human_in_the_loop_middleware_mixed_auto_approved_and_interrupt() -> Non """Test HumanInTheLoopMiddleware with mix of auto-approved and interrupt tools.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "interrupt_tool": {"allow_respond": True, "allow_edit": True, "allow_accept": True} - } + interrupt_on={"interrupt_tool": {"allowed_decisions": ["approve", "edit", "reject"]}} ) ai_message = AIMessage( @@ -829,7 +823,7 @@ def test_human_in_the_loop_middleware_mixed_auto_approved_and_interrupt() -> Non state = {"messages": [HumanMessage(content="Hello"), ai_message]} def mock_accept(requests): - return [{"type": "accept", "args": None}] + return {"decisions": [{"type": "approve"}]} with patch("langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_accept): result = middleware.after_model(state, None) @@ -848,9 +842,7 @@ def test_human_in_the_loop_middleware_interrupt_request_structure() -> None: """Test that interrupt requests are structured correctly.""" middleware = HumanInTheLoopMiddleware( - interrupt_on={ - "test_tool": {"allow_accept": True, "allow_edit": True, "allow_respond": True} - }, + interrupt_on={"test_tool": {"allowed_decisions": ["approve", "edit", "reject"]}}, description_prefix="Custom prefix", ) @@ -860,31 +852,34 @@ def test_human_in_the_loop_middleware_interrupt_request_structure() -> None: ) state = {"messages": [HumanMessage(content="Hello"), ai_message]} - captured_requests = [] + captured_request = None - def mock_capture_requests(requests): - captured_requests.extend(requests) - return [{"type": "accept", "args": None}] + def mock_capture_requests(request): + nonlocal captured_request + captured_request = request + return {"decisions": [{"type": "approve"}]} with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", 
side_effect=mock_capture_requests ): middleware.after_model(state, None) - assert len(captured_requests) == 1 - request = captured_requests[0] + assert captured_request is not None + assert "action_requests" in captured_request + assert "review_configs" in captured_request - assert "action_request" in request - assert "config" in request - assert "description" in request + assert len(captured_request["action_requests"]) == 1 + action_request = captured_request["action_requests"][0] + assert action_request["name"] == "test_tool" + assert action_request["args"] == {"input": "test", "location": "SF"} + assert "Custom prefix" in action_request["description"] + assert "Tool: test_tool" in action_request["description"] + assert "Args: {'input': 'test', 'location': 'SF'}" in action_request["description"] - assert request["action_request"]["action"] == "test_tool" - assert request["action_request"]["args"] == {"input": "test", "location": "SF"} - expected_config = {"allow_accept": True, "allow_edit": True, "allow_respond": True} - assert request["config"] == expected_config - assert "Custom prefix" in request["description"] - assert "Tool: test_tool" in request["description"] - assert "Args: {'input': 'test', 'location': 'SF'}" in request["description"] + assert len(captured_request["review_configs"]) == 1 + review_config = captured_request["review_configs"][0] + assert review_config["action_name"] == "test_tool" + assert review_config["allowed_decisions"] == ["approve", "edit", "reject"] def test_human_in_the_loop_middleware_boolean_configs() -> None: @@ -900,7 +895,7 @@ def test_human_in_the_loop_middleware_boolean_configs() -> None: # Test accept with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", - return_value=[{"type": "accept", "args": None}], + return_value={"decisions": [{"type": "approve"}]}, ): result = middleware.after_model(state, None) assert result is not None @@ -911,15 +906,17 @@ def test_human_in_the_loop_middleware_boolean_configs() -> None: # Test edit with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", - return_value=[ - { - "type": "edit", - "args": ActionRequest( - action="test_tool", - args={"input": "edited"}, - ), - } - ], + return_value={ + "decisions": [ + { + "type": "edit", + "edited_action": Action( + name="test_tool", + args={"input": "edited"}, + ), + } + ] + }, ): result = middleware.after_model(state, None) assert result is not None @@ -947,25 +944,27 @@ def test_human_in_the_loop_middleware_sequence_mismatch() -> None: # Test with too few responses with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", - return_value=[], # No responses for 1 tool call + return_value={"decisions": []}, # No responses for 1 tool call ): with pytest.raises( ValueError, - match=r"Number of human responses \(0\) does not match number of hanging tool calls \(1\)\.", + match=r"Number of human decisions \(0\) does not match number of hanging tool calls \(1\)\.", ): middleware.after_model(state, None) # Test with too many responses with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", - return_value=[ - {"type": "accept", "args": None}, - {"type": "accept", "args": None}, - ], # 2 responses for 1 tool call + return_value={ + "decisions": [ + {"type": "approve"}, + {"type": "approve"}, + ] + }, # 2 responses for 1 tool call ): with pytest.raises( ValueError, - match=r"Number of human responses \(2\) does not match number of hanging tool calls \(1\)\.", + match=r"Number of human decisions \(2\) does not match number of 
hanging tool calls \(1\)\.", ): middleware.after_model(state, None) @@ -979,8 +978,14 @@ def test_human_in_the_loop_middleware_description_as_callable() -> None: middleware = HumanInTheLoopMiddleware( interrupt_on={ - "tool_with_callable": {"allow_accept": True, "description": custom_description}, - "tool_with_string": {"allow_accept": True, "description": "Static description"}, + "tool_with_callable": { + "allowed_decisions": ["approve"], + "description": custom_description, + }, + "tool_with_string": { + "allowed_decisions": ["approve"], + "description": "Static description", + }, } ) @@ -993,135 +998,30 @@ def test_human_in_the_loop_middleware_description_as_callable() -> None: ) state = {"messages": [HumanMessage(content="Hello"), ai_message]} - captured_requests = [] + captured_request = None - def mock_capture_requests(requests): - captured_requests.extend(requests) - return [{"type": "accept"}, {"type": "accept"}] + def mock_capture_requests(request): + nonlocal captured_request + captured_request = request + return {"decisions": [{"type": "approve"}, {"type": "approve"}]} with patch( "langchain.agents.middleware.human_in_the_loop.interrupt", side_effect=mock_capture_requests ): middleware.after_model(state, None) - assert len(captured_requests) == 2 + assert captured_request is not None + assert "action_requests" in captured_request + assert len(captured_request["action_requests"]) == 2 # Check callable description assert ( - captured_requests[0]["description"] == "Custom: tool_with_callable with args {'x': 1}" + captured_request["action_requests"][0]["description"] + == "Custom: tool_with_callable with args {'x': 1}" ) # Check string description - assert captured_requests[1]["description"] == "Static description" - - -# Tests for AnthropicPromptCachingMiddleware -def test_anthropic_prompt_caching_middleware_initialization() -> None: - """Test AnthropicPromptCachingMiddleware initialization.""" - # Test with custom values - middleware = AnthropicPromptCachingMiddleware( - type="ephemeral", ttl="1h", min_messages_to_cache=5 - ) - assert middleware.type == "ephemeral" - assert middleware.ttl == "1h" - assert middleware.min_messages_to_cache == 5 - - # Test with default values - middleware = AnthropicPromptCachingMiddleware() - assert middleware.type == "ephemeral" - assert middleware.ttl == "5m" - assert middleware.min_messages_to_cache == 0 - - fake_request = ModelRequest( - model=FakeToolCallingModel(), - messages=[HumanMessage("Hello")], - system_prompt=None, - tool_choice=None, - tools=[], - response_format=None, - state={"messages": [HumanMessage("Hello")]}, - runtime=cast(Runtime, object()), - model_settings={}, - ) - - def mock_handler(req: ModelRequest) -> AIMessage: - return AIMessage(content="mock response", **req.model_settings) - - result = middleware.wrap_model_call(fake_request, mock_handler) - # Check that model_settings were passed through via the request - assert fake_request.model_settings == {"cache_control": {"type": "ephemeral", "ttl": "5m"}} - - -def test_anthropic_prompt_caching_middleware_unsupported_model() -> None: - """Test AnthropicPromptCachingMiddleware with unsupported model.""" - from typing import cast - - fake_request = ModelRequest( - model=FakeToolCallingModel(), - messages=[HumanMessage("Hello")], - system_prompt=None, - tool_choice=None, - tools=[], - response_format=None, - state={"messages": [HumanMessage("Hello")]}, - runtime=cast(Runtime, object()), - model_settings={}, - ) - - middleware = 
AnthropicPromptCachingMiddleware(unsupported_model_behavior="raise") - - def mock_handler(req: ModelRequest) -> AIMessage: - return AIMessage(content="mock response") - - with pytest.raises( - ValueError, - match="AnthropicPromptCachingMiddleware caching middleware only supports Anthropic models. Please install langchain-anthropic.", - ): - middleware.wrap_model_call(fake_request, mock_handler) - - langchain_anthropic = ModuleType("langchain_anthropic") - - class MockChatAnthropic: - pass - - langchain_anthropic.ChatAnthropic = MockChatAnthropic - - with patch.dict("sys.modules", {"langchain_anthropic": langchain_anthropic}): - with pytest.raises( - ValueError, - match="AnthropicPromptCachingMiddleware caching middleware only supports Anthropic models, not instances of", - ): - middleware.wrap_model_call(fake_request, mock_handler) - - middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="warn") - - with warnings.catch_warnings(record=True) as w: - result = middleware.wrap_model_call(fake_request, mock_handler) - assert len(w) == 1 - assert ( - "AnthropicPromptCachingMiddleware caching middleware only supports Anthropic models. Please install langchain-anthropic." - in str(w[-1].message) - ) - assert isinstance(result, AIMessage) - - with warnings.catch_warnings(record=True) as w: - with patch.dict("sys.modules", {"langchain_anthropic": langchain_anthropic}): - result = middleware.wrap_model_call(fake_request, mock_handler) - assert isinstance(result, AIMessage) - assert len(w) == 1 - assert ( - "AnthropicPromptCachingMiddleware caching middleware only supports Anthropic models, not instances of" - in str(w[-1].message) - ) - - middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore") - - result = middleware.wrap_model_call(fake_request, mock_handler) - assert isinstance(result, AIMessage) - - with patch.dict("sys.modules", {"langchain_anthropic": {"ChatAnthropic": object()}}): - result = middleware.wrap_model_call(fake_request, mock_handler) - assert isinstance(result, AIMessage) + assert captured_request["action_requests"][1]["description"] == "Static description" # Tests for SummarizationMiddleware @@ -1497,37 +1397,48 @@ def test_runtime_injected_into_middleware() -> None: agent.invoke({"messages": [HumanMessage("Hello")]}) +# test setup defined at this scope bc of pydantic issues inferring the namespace of +# custom state w/in a function + + +class CustomState(AgentState): + custom_state: str + + +@tool(description="Test the state") +def test_state_tool( + state: Annotated[CustomState, InjectedState], tool_call_id: Annotated[str, InjectedToolCallId] +) -> str: + """Test tool that accesses injected state.""" + assert "custom_state" in state + return "success" + + +class CustomMiddleware(AgentMiddleware): + state_schema = CustomState + + +agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[ + [{"args": {}, "id": "test_call_1", "name": "test_state_tool"}], + [], + ] + ), + tools=[test_state_tool], + system_prompt="You are a helpful assistant.", + middleware=[CustomMiddleware()], +) + + +@pytest.mark.skipif( + sys.version_info >= (3, 14), reason="pydantic 2.12 namespace management not working w/ 3.14" +) def test_injected_state_in_middleware_agent() -> None: """Test that custom state is properly injected into tools when using middleware.""" - class TestState(AgentState): - test_state: str - - @tool(description="Test the state") - def test_state( - state: Annotated[TestState, InjectedState], tool_call_id: Annotated[str, 
InjectedToolCallId] - ) -> str: - """Test tool that accesses injected state.""" - assert "test_state" in state - return "success" - - class TestMiddleware(AgentMiddleware): - state_schema = TestState - - agent = create_agent( - model=FakeToolCallingModel( - tool_calls=[ - [{"args": {}, "id": "test_call_1", "name": "test_state"}], - [], - ] - ), - tools=[test_state], - system_prompt="You are a helpful assistant.", - middleware=[TestMiddleware()], - ) - result = agent.invoke( - {"test_state": "I love pizza", "messages": [HumanMessage("Call the test state tool")]} + {"custom_state": "I love pizza", "messages": [HumanMessage("Call the test state tool")]} ) messages = result["messages"] @@ -1538,7 +1449,7 @@ def test_injected_state_in_middleware_agent() -> None: assert len(tool_messages) == 1 tool_message = tool_messages[0] - assert tool_message.name == "test_state" + assert tool_message.name == "test_state_tool" assert "success" in tool_message.content assert tool_message.tool_call_id == "test_call_1" @@ -1558,10 +1469,10 @@ def test_jump_to_is_ephemeral() -> None: assert "jump_to" not in result -# Tests for PlanningMiddleware -def test_planning_middleware_initialization() -> None: - """Test that PlanningMiddleware initializes correctly.""" - middleware = PlanningMiddleware() +# Tests for TodoListMiddleware +def test_todo_middleware_initialization() -> None: + """Test that TodoListMiddleware initializes correctly.""" + middleware = TodoListMiddleware() assert middleware.state_schema == PlanningState assert len(middleware.tools) == 1 assert middleware.tools[0].name == "write_todos" @@ -1574,9 +1485,9 @@ def test_planning_middleware_initialization() -> None: (None, "## `write_todos`"), ], ) -def test_planning_middleware_on_model_call(original_prompt, expected_prompt_prefix) -> None: +def test_todo_middleware_on_model_call(original_prompt, expected_prompt_prefix) -> None: """Test that wrap_model_call handles system prompts correctly.""" - middleware = PlanningMiddleware() + middleware = TodoListMiddleware() model = FakeToolCallingModel() state: PlanningState = {"messages": [HumanMessage(content="Hello")]} @@ -1627,7 +1538,7 @@ def test_planning_middleware_on_model_call(original_prompt, expected_prompt_pref ), ], ) -def test_planning_middleware_write_todos_tool_execution(todos, expected_message) -> None: +def test_todo_middleware_write_todos_tool_execution(todos, expected_message) -> None: """Test that the write_todos tool executes correctly.""" tool_call = { "args": {"todos": todos}, @@ -1647,7 +1558,7 @@ def test_planning_middleware_write_todos_tool_execution(todos, expected_message) [{"status": "pending"}], ], ) -def test_planning_middleware_write_todos_tool_validation_errors(invalid_todos) -> None: +def test_todo_middleware_write_todos_tool_validation_errors(invalid_todos) -> None: """Test that the write_todos tool rejects invalid input.""" tool_call = { "args": {"todos": invalid_todos}, @@ -1659,7 +1570,7 @@ def test_planning_middleware_write_todos_tool_validation_errors(invalid_todos) - write_todos.invoke(tool_call) -def test_planning_middleware_agent_creation_with_middleware() -> None: +def test_todo_middleware_agent_creation_with_middleware() -> None: """Test that an agent can be created with the planning middleware.""" model = FakeToolCallingModel( tool_calls=[ @@ -1690,7 +1601,7 @@ def test_planning_middleware_agent_creation_with_middleware() -> None: [], ] ) - middleware = PlanningMiddleware() + middleware = TodoListMiddleware() agent = create_agent(model=model, middleware=[middleware]) 
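    # Illustrative note (not asserted by this test): the scripted model is
    # expected to call the middleware-provided `write_todos` tool with a
    # payload shaped roughly like
    #     {"name": "write_todos", "args": {"todos": [{"content": "...", "status": "pending"}]}}
    # (the "content"/"status" todo fields are assumed from the validation
    # tests above), and each such call yields one ToolMessage counted below.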
result = agent.invoke({"messages": [HumanMessage("Hello")]}) @@ -1707,12 +1618,14 @@ def test_planning_middleware_agent_creation_with_middleware() -> None: assert len(result["messages"]) == 8 -def test_planning_middleware_custom_system_prompt() -> None: - """Test that PlanningMiddleware can be initialized with custom system prompt.""" +def test_todo_middleware_custom_system_prompt() -> None: + """Test that TodoListMiddleware can be initialized with custom system prompt.""" custom_system_prompt = "Custom todo system prompt for testing" - middleware = PlanningMiddleware(system_prompt=custom_system_prompt) + middleware = TodoListMiddleware(system_prompt=custom_system_prompt) model = FakeToolCallingModel() + state: PlanningState = {"messages": [HumanMessage(content="Hello")]} + request = ModelRequest( model=model, system_prompt="Original prompt", @@ -1721,10 +1634,10 @@ def test_planning_middleware_custom_system_prompt() -> None: tools=[], response_format=None, model_settings={}, + state=state, + runtime=cast(Runtime, object()), ) - state: PlanningState = {"messages": [HumanMessage(content="Hello")]} - def mock_handler(req: ModelRequest) -> AIMessage: return AIMessage(content="mock response") @@ -1734,21 +1647,21 @@ def test_planning_middleware_custom_system_prompt() -> None: assert request.system_prompt == f"Original prompt\n\n{custom_system_prompt}" -def test_planning_middleware_custom_tool_description() -> None: - """Test that PlanningMiddleware can be initialized with custom tool description.""" +def test_todo_middleware_custom_tool_description() -> None: + """Test that TodoListMiddleware can be initialized with custom tool description.""" custom_tool_description = "Custom tool description for testing" - middleware = PlanningMiddleware(tool_description=custom_tool_description) + middleware = TodoListMiddleware(tool_description=custom_tool_description) assert len(middleware.tools) == 1 tool = middleware.tools[0] assert tool.description == custom_tool_description -def test_planning_middleware_custom_system_prompt_and_tool_description() -> None: - """Test that PlanningMiddleware can be initialized with both custom prompts.""" +def test_todo_middleware_custom_system_prompt_and_tool_description() -> None: + """Test that TodoListMiddleware can be initialized with both custom prompts.""" custom_system_prompt = "Custom system prompt" custom_tool_description = "Custom tool description" - middleware = PlanningMiddleware( + middleware = TodoListMiddleware( system_prompt=custom_system_prompt, tool_description=custom_tool_description, ) @@ -1783,9 +1696,9 @@ def test_planning_middleware_custom_system_prompt_and_tool_description() -> None assert tool.description == custom_tool_description -def test_planning_middleware_default_prompts() -> None: - """Test that PlanningMiddleware uses default prompts when none provided.""" - middleware = PlanningMiddleware() +def test_todo_middleware_default_prompts() -> None: + """Test that TodoListMiddleware uses default prompts when none provided.""" + middleware = TodoListMiddleware() # Verify default system prompt assert middleware.system_prompt == WRITE_TODOS_SYSTEM_PROMPT @@ -1797,9 +1710,9 @@ def test_planning_middleware_default_prompts() -> None: assert tool.description == WRITE_TODOS_TOOL_DESCRIPTION -def test_planning_middleware_custom_system_prompt() -> None: +def test_todo_middleware_custom_system_prompt_in_agent() -> None: """Test that custom tool executes correctly in an agent.""" - middleware = PlanningMiddleware(system_prompt="call the write_todos tool") 
+ middleware = TodoListMiddleware(system_prompt="call the write_todos tool") model = FakeToolCallingModel( tool_calls=[ @@ -2189,20 +2102,28 @@ async def test_create_agent_mixed_sync_async_middleware() -> None: """Test async invoke with mixed sync and async middleware.""" calls = [] - class SyncMiddleware(AgentMiddleware): + class MostlySyncMiddleware(AgentMiddleware): def before_model(self, state, runtime) -> None: - calls.append("SyncMiddleware.before_model") + calls.append("MostlySyncMiddleware.before_model") def wrap_model_call( self, request: ModelRequest, handler: Callable[[ModelRequest], AIMessage], ) -> AIMessage: - calls.append("SyncMiddleware.wrap_model_call") + calls.append("MostlySyncMiddleware.wrap_model_call") return handler(request) + async def awrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: + calls.append("MostlySyncMiddleware.awrap_model_call") + return await handler(request) + def after_model(self, state, runtime) -> None: - calls.append("SyncMiddleware.after_model") + calls.append("MostlySyncMiddleware.after_model") class AsyncMiddleware(AgentMiddleware): async def abefore_model(self, state, runtime) -> None: @@ -2223,20 +2144,21 @@ async def test_create_agent_mixed_sync_async_middleware() -> None: model=FakeToolCallingModel(), tools=[], system_prompt="You are a helpful assistant.", - middleware=[SyncMiddleware(), AsyncMiddleware()], + middleware=[MostlySyncMiddleware(), AsyncMiddleware()], ) - result = await agent.ainvoke({"messages": [HumanMessage("hello")]}) + await agent.ainvoke({"messages": [HumanMessage("hello")]}) # In async mode, both sync and async middleware should work # Note: Sync wrap_model_call is not called when running in async mode, # as the async version is preferred assert calls == [ - "SyncMiddleware.before_model", + "MostlySyncMiddleware.before_model", "AsyncMiddleware.abefore_model", + "MostlySyncMiddleware.awrap_model_call", "AsyncMiddleware.awrap_model_call", "AsyncMiddleware.aafter_model", - "SyncMiddleware.after_model", + "MostlySyncMiddleware.after_model", ] @@ -2561,8 +2483,10 @@ def test_wrap_model_call_rewrite_response() -> None: def wrap_model_call(self, request, handler): result = handler(request) + # result is ModelResponse, extract AIMessage from it + ai_message = result.result[0] # Rewrite the response - return AIMessage(content=f"REWRITTEN: {result.content}") + return AIMessage(content=f"REWRITTEN: {ai_message.content}") model = SimpleModel() middleware = ResponseRewriteMiddleware() @@ -2629,10 +2553,8 @@ def test_create_agent_sync_invoke_with_only_async_middleware_raises_error() -> N middleware=[AsyncOnlyMiddleware()], ) - # This should work now via run_in_executor - result = agent.invoke({"messages": [HumanMessage("hello")]}) - assert result is not None - assert "messages" in result + with pytest.raises(NotImplementedError): + agent.invoke({"messages": [HumanMessage("hello")]}) def test_create_agent_sync_invoke_with_mixed_middleware() -> None: diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_middleware_decorators.py b/libs/langchain_v1/tests/unit_tests/agents/test_middleware_decorators.py index 7663616bbcd..b8969bcaa2e 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_middleware_decorators.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_middleware_decorators.py @@ -18,6 +18,7 @@ from langchain.agents.middleware.types import ( after_model, dynamic_prompt, wrap_model_call, + wrap_tool_call, hook_config, ) from 
langchain.agents.factory import create_agent, _get_can_jump_to @@ -356,16 +357,16 @@ async def test_mixed_sync_async_decorators_integration() -> None: call_order.append("async_before") return None - @wrap_model_call - def track_sync_on_call(request, handler): - call_order.append("sync_on_call") - return handler(request) - @wrap_model_call async def track_async_on_call(request, handler): call_order.append("async_on_call") return await handler(request) + @wrap_tool_call + async def track_sync_on_tool_call(request, handler): + call_order.append("async_on_tool_call") + return await handler(request) + @after_model async def track_async_after(state: AgentState, runtime: Runtime) -> None: call_order.append("async_after") @@ -381,8 +382,8 @@ async def test_mixed_sync_async_decorators_integration() -> None: middleware=[ track_sync_before, track_async_before, - track_sync_on_call, track_async_on_call, + track_sync_on_tool_call, track_async_after, track_sync_after, ], @@ -390,8 +391,9 @@ async def test_mixed_sync_async_decorators_integration() -> None: # Agent is already compiled await agent.ainvoke({"messages": [HumanMessage("Hello")]}) - # In async mode, only async wrap_model_call is invoked (not sync) - # Sync before_model and after_model are still called via run_in_executor + # In async mode, we can automatically delegate to sync middleware for nodes + # (although we cannot delegate to sync middleware for model call or tool call) + assert call_order == [ "sync_before", "async_before", @@ -714,3 +716,51 @@ def test_dynamic_prompt_multiple_in_sequence() -> None: result = agent.invoke({"messages": [HumanMessage("Hello")]}) assert result["messages"][-1].content == "Second prompt.-Hello" + + +def test_async_dynamic_prompt_skipped_on_sync_invoke() -> None: + """Test that async dynamic_prompt raises NotImplementedError when invoked via sync path (.invoke). + + When an async-only middleware is defined, it cannot be called from the sync path. + The framework will raise NotImplementedError when trying to invoke the sync method. + """ + calls = [] + + @dynamic_prompt + async def async_only_prompt(request: ModelRequest) -> str: + calls.append("async_prompt") + return "Async prompt" + + agent = create_agent(model=FakeToolCallingModel(), middleware=[async_only_prompt]) + + # Async-only middleware raises NotImplementedError in sync path + with pytest.raises(NotImplementedError): + agent.invoke({"messages": [HumanMessage("Hello")]}) + + # The async prompt was not called + assert calls == [] + + +async def test_sync_dynamic_prompt_on_async_invoke() -> None: + """Test that sync dynamic_prompt works when invoked via async path (.ainvoke). + + When a sync middleware is defined with @dynamic_prompt, it automatically creates + both sync and async implementations. The async implementation delegates to the + sync function, allowing the middleware to work in both sync and async contexts. 
+ """ + calls = [] + + @dynamic_prompt + def sync_prompt(request: ModelRequest) -> str: + calls.append("sync_prompt") + return "Sync prompt" + + agent = create_agent(model=FakeToolCallingModel(), middleware=[sync_prompt]) + + # Sync dynamic_prompt now works in async path via delegation + result = await agent.ainvoke({"messages": [HumanMessage("Hello")]}) + + # The sync prompt function was called via async delegation + assert calls == ["sync_prompt"] + # The model executed with the custom prompt + assert result["messages"][-1].content == "Sync prompt-Hello" diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_middleware_tools.py b/libs/langchain_v1/tests/unit_tests/agents/test_middleware_tools.py index b9a9336a3df..3bccc7ba342 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_middleware_tools.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_middleware_tools.py @@ -1,15 +1,15 @@ """Test Middleware handling of tools in agents.""" -import pytest from collections.abc import Callable -from langchain.agents.middleware.types import AgentMiddleware, AgentState, ModelRequest +import pytest + from langchain.agents.factory import create_agent -from langchain.tools import ToolNode +from langchain.agents.middleware.types import AgentMiddleware, AgentState, ModelRequest +from langgraph.prebuilt.tool_node import ToolNode from langchain_core.messages import AIMessage, HumanMessage, ToolMessage from langchain_core.tools import tool from .model import FakeToolCallingModel -from langgraph.runtime import Runtime def test_model_request_tools_are_base_tools() -> None: @@ -105,7 +105,6 @@ def test_middleware_can_modify_tools() -> None: def test_unknown_tool_raises_error() -> None: """Test that using an unknown tool in ModelRequest raises a clear error.""" - from langchain_core.tools import BaseTool @tool def known_tool(input: str) -> str: diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_model_fallback_middleware.py b/libs/langchain_v1/tests/unit_tests/agents/test_model_fallback_middleware.py new file mode 100644 index 00000000000..2c24296fc1a --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_model_fallback_middleware.py @@ -0,0 +1,215 @@ +"""Unit tests for ModelFallbackMiddleware.""" + +from __future__ import annotations + +from typing import cast + +import pytest +from langchain_core.language_models.fake_chat_models import GenericFakeChatModel +from langchain_core.messages import AIMessage + +from langchain.agents.middleware.model_fallback import ModelFallbackMiddleware +from langchain.agents.middleware.types import ModelRequest, ModelResponse +from langgraph.runtime import Runtime + + +def _fake_runtime() -> Runtime: + return cast(Runtime, object()) + + +def _make_request() -> ModelRequest: + """Create a minimal ModelRequest for testing.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="primary")])) + return ModelRequest( + model=model, + system_prompt=None, + messages=[], + tool_choice=None, + tools=[], + response_format=None, + state=cast("AgentState", {}), # type: ignore[name-defined] + runtime=_fake_runtime(), + model_settings={}, + ) + + +def test_primary_model_succeeds() -> None: + """Test that primary model is used when it succeeds.""" + primary_model = GenericFakeChatModel(messages=iter([AIMessage(content="primary response")])) + fallback_model = GenericFakeChatModel(messages=iter([AIMessage(content="fallback response")])) + + middleware = ModelFallbackMiddleware(fallback_model) + request = _make_request() + request.model = 
primary_model + + def mock_handler(req: ModelRequest) -> ModelResponse: + # Simulate successful model call + result = req.model.invoke([]) + return ModelResponse(result=[result]) + + response = middleware.wrap_model_call(request, mock_handler) + + assert isinstance(response, ModelResponse) + assert response.result[0].content == "primary response" + + +def test_fallback_on_primary_failure() -> None: + """Test that fallback model is used when primary fails.""" + + class FailingPrimaryModel(GenericFakeChatModel): + def _generate(self, messages, **kwargs): + raise ValueError("Primary model failed") + + primary_model = FailingPrimaryModel(messages=iter([AIMessage(content="should not see")])) + fallback_model = GenericFakeChatModel(messages=iter([AIMessage(content="fallback response")])) + + middleware = ModelFallbackMiddleware(fallback_model) + request = _make_request() + request.model = primary_model + + def mock_handler(req: ModelRequest) -> ModelResponse: + result = req.model.invoke([]) + return ModelResponse(result=[result]) + + response = middleware.wrap_model_call(request, mock_handler) + + assert isinstance(response, ModelResponse) + assert response.result[0].content == "fallback response" + + +def test_multiple_fallbacks() -> None: + """Test that multiple fallback models are tried in sequence.""" + + class FailingModel(GenericFakeChatModel): + def _generate(self, messages, **kwargs): + raise ValueError("Model failed") + + primary_model = FailingModel(messages=iter([AIMessage(content="should not see")])) + fallback1 = FailingModel(messages=iter([AIMessage(content="fallback1")])) + fallback2 = GenericFakeChatModel(messages=iter([AIMessage(content="fallback2")])) + + middleware = ModelFallbackMiddleware(fallback1, fallback2) + request = _make_request() + request.model = primary_model + + def mock_handler(req: ModelRequest) -> ModelResponse: + result = req.model.invoke([]) + return ModelResponse(result=[result]) + + response = middleware.wrap_model_call(request, mock_handler) + + assert isinstance(response, ModelResponse) + assert response.result[0].content == "fallback2" + + +def test_all_models_fail() -> None: + """Test that exception is raised when all models fail.""" + + class AlwaysFailingModel(GenericFakeChatModel): + def _generate(self, messages, **kwargs): + raise ValueError("Model failed") + + primary_model = AlwaysFailingModel(messages=iter([])) + fallback_model = AlwaysFailingModel(messages=iter([])) + + middleware = ModelFallbackMiddleware(fallback_model) + request = _make_request() + request.model = primary_model + + def mock_handler(req: ModelRequest) -> ModelResponse: + result = req.model.invoke([]) + return ModelResponse(result=[result]) + + with pytest.raises(ValueError, match="Model failed"): + middleware.wrap_model_call(request, mock_handler) + + +async def test_primary_model_succeeds_async() -> None: + """Test async version - primary model is used when it succeeds.""" + primary_model = GenericFakeChatModel(messages=iter([AIMessage(content="primary response")])) + fallback_model = GenericFakeChatModel(messages=iter([AIMessage(content="fallback response")])) + + middleware = ModelFallbackMiddleware(fallback_model) + request = _make_request() + request.model = primary_model + + async def mock_handler(req: ModelRequest) -> ModelResponse: + # Simulate successful async model call + result = await req.model.ainvoke([]) + return ModelResponse(result=[result]) + + response = await middleware.awrap_model_call(request, mock_handler) + + assert isinstance(response, ModelResponse) + 
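    # Note: `mock_handler` stands in for the agent's model node. A rough
    # sketch of end-to-end wiring (illustrative; create_agent usage is assumed
    # from the other test modules in this PR and is not exercised here):
    #
    #     middleware = ModelFallbackMiddleware(fallback_model)
    #     agent = create_agent(model=primary_model, tools=[], middleware=[middleware])
    #     result = await agent.ainvoke({"messages": [HumanMessage("hi")]})
    #
    # On failure the middleware retries the handler with each fallback model
    # in order, so the caller still receives a single successful ModelResponse.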
assert response.result[0].content == "primary response" + + +async def test_fallback_on_primary_failure_async() -> None: + """Test async version - fallback model is used when primary fails.""" + + class AsyncFailingPrimaryModel(GenericFakeChatModel): + async def _agenerate(self, messages, **kwargs): + raise ValueError("Primary model failed") + + primary_model = AsyncFailingPrimaryModel(messages=iter([AIMessage(content="should not see")])) + fallback_model = GenericFakeChatModel(messages=iter([AIMessage(content="fallback response")])) + + middleware = ModelFallbackMiddleware(fallback_model) + request = _make_request() + request.model = primary_model + + async def mock_handler(req: ModelRequest) -> ModelResponse: + result = await req.model.ainvoke([]) + return ModelResponse(result=[result]) + + response = await middleware.awrap_model_call(request, mock_handler) + + assert isinstance(response, ModelResponse) + assert response.result[0].content == "fallback response" + + +async def test_multiple_fallbacks_async() -> None: + """Test async version - multiple fallback models are tried in sequence.""" + + class AsyncFailingModel(GenericFakeChatModel): + async def _agenerate(self, messages, **kwargs): + raise ValueError("Model failed") + + primary_model = AsyncFailingModel(messages=iter([AIMessage(content="should not see")])) + fallback1 = AsyncFailingModel(messages=iter([AIMessage(content="fallback1")])) + fallback2 = GenericFakeChatModel(messages=iter([AIMessage(content="fallback2")])) + + middleware = ModelFallbackMiddleware(fallback1, fallback2) + request = _make_request() + request.model = primary_model + + async def mock_handler(req: ModelRequest) -> ModelResponse: + result = await req.model.ainvoke([]) + return ModelResponse(result=[result]) + + response = await middleware.awrap_model_call(request, mock_handler) + + assert isinstance(response, ModelResponse) + assert response.result[0].content == "fallback2" + + +async def test_all_models_fail_async() -> None: + """Test async version - exception is raised when all models fail.""" + + class AsyncAlwaysFailingModel(GenericFakeChatModel): + async def _agenerate(self, messages, **kwargs): + raise ValueError("Model failed") + + primary_model = AsyncAlwaysFailingModel(messages=iter([])) + fallback_model = AsyncAlwaysFailingModel(messages=iter([])) + + middleware = ModelFallbackMiddleware(fallback_model) + request = _make_request() + request.model = primary_model + + async def mock_handler(req: ModelRequest) -> ModelResponse: + result = await req.model.ainvoke([]) + return ModelResponse(result=[result]) + + with pytest.raises(ValueError, match="Model failed"): + await middleware.awrap_model_call(request, mock_handler) diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_on_tool_call_middleware.py b/libs/langchain_v1/tests/unit_tests/agents/test_on_tool_call_middleware.py index bdd01ed7008..e936562ac03 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_on_tool_call_middleware.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_on_tool_call_middleware.py @@ -10,7 +10,7 @@ from langgraph.types import Command from langchain.agents.factory import create_agent from langchain.agents.middleware.types import AgentMiddleware -from langchain.tools.tool_node import ToolCallRequest +from langchain.agents.middleware.types import ToolCallRequest from tests.unit_tests.agents.test_middleware_agent import FakeToolCallingModel @@ -368,15 +368,15 @@ def test_middleware_access_to_state() -> None: {"configurable": {"thread_id": "test"}}, ) - # 
Middleware should have seen state (state is passed to on_tool_call) + # Middleware should have seen state (state is passed to wrap_tool) assert len(state_seen) >= 1 -def test_middleware_without_on_tool_call() -> None: - """Test that middleware without on_tool_call hook works normally.""" +def test_middleware_without_wrap_tool() -> None: + """Test that middleware without wrap_tool hook works normally.""" class NoOpMiddleware(AgentMiddleware): - """Middleware without on_tool_call.""" + """Middleware without wrap_tool.""" def before_model(self, state, runtime): """Just a dummy hook.""" diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_parallel_tool_call_limits.py b/libs/langchain_v1/tests/unit_tests/agents/test_parallel_tool_call_limits.py new file mode 100644 index 00000000000..141dcc9ac89 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_parallel_tool_call_limits.py @@ -0,0 +1,192 @@ +"""Tests for parallel tool call handling in ToolCallLimitMiddleware.""" + +import pytest +from langchain_core.messages import AIMessage, HumanMessage, ToolCall, ToolMessage +from langchain_core.tools import tool +from langgraph.checkpoint.memory import InMemorySaver + +from langchain.agents.factory import create_agent +from langchain.agents.middleware.tool_call_limit import ToolCallLimitMiddleware +from tests.unit_tests.agents.model import FakeToolCallingModel + + +def test_parallel_tool_calls_with_limit_continue_mode(): + """Test parallel tool calls with a limit of 1 in 'continue' mode. + + When the model proposes 3 tool calls with a limit of 1: + - The first call should execute successfully + - The 2nd and 3rd calls should be blocked with error ToolMessages + - Execution should continue (no jump_to) + """ + + @tool + def search(query: str) -> str: + """Search for information.""" + return f"Results: {query}" + + # Model proposes 3 parallel search calls in a single AIMessage + model = FakeToolCallingModel( + tool_calls=[ + [ + ToolCall(name="search", args={"query": "q1"}, id="1"), + ToolCall(name="search", args={"query": "q2"}, id="2"), + ToolCall(name="search", args={"query": "q3"}, id="3"), + ], + [], # Model stops after seeing the errors + ] + ) + + limiter = ToolCallLimitMiddleware(thread_limit=1, exit_behavior="continue") + agent = create_agent( + model=model, tools=[search], middleware=[limiter], checkpointer=InMemorySaver() + ) + + result = agent.invoke( + {"messages": [HumanMessage("Test")]}, {"configurable": {"thread_id": "test"}} + ) + messages = result["messages"] + + # Verify tool message counts + tool_messages = [msg for msg in messages if isinstance(msg, ToolMessage)] + successful_tool_messages = [msg for msg in tool_messages if msg.status != "error"] + error_tool_messages = [msg for msg in tool_messages if msg.status == "error"] + + assert len(successful_tool_messages) == 1, "Should have 1 successful tool message (q1)" + assert len(error_tool_messages) == 2, "Should have 2 blocked tool messages (q2, q3)" + + # Verify the successful call is q1 + assert "q1" in successful_tool_messages[0].content + + # Verify error messages explain the limit + for error_msg in error_tool_messages: + assert "limit" in error_msg.content.lower() + + # Verify execution continued (no early termination) + ai_messages = [msg for msg in messages if isinstance(msg, AIMessage)] + # Should have: initial AI message with 3 tool calls, then final AI message (no tool calls) + assert len(ai_messages) >= 2 + + +def test_parallel_tool_calls_with_limit_end_mode(): + """Test parallel tool calls with a limit 
of 1 in 'end' mode. + + When the model proposes 3 tool calls with a limit of 1: + - The first call would be allowed (within limit) + - The 2nd and 3rd calls exceed the limit and get blocked with error ToolMessages + - Execution stops immediately (jump_to: end) so NO tools actually execute + - An AI message explains why execution stopped + """ + + @tool + def search(query: str) -> str: + """Search for information.""" + return f"Results: {query}" + + # Model proposes 3 parallel search calls + model = FakeToolCallingModel( + tool_calls=[ + [ + ToolCall(name="search", args={"query": "q1"}, id="1"), + ToolCall(name="search", args={"query": "q2"}, id="2"), + ToolCall(name="search", args={"query": "q3"}, id="3"), + ], + [], + ] + ) + + limiter = ToolCallLimitMiddleware(thread_limit=1, exit_behavior="end") + agent = create_agent( + model=model, tools=[search], middleware=[limiter], checkpointer=InMemorySaver() + ) + + result = agent.invoke( + {"messages": [HumanMessage("Test")]}, {"configurable": {"thread_id": "test"}} + ) + messages = result["messages"] + + # Verify tool message counts + # With "end" behavior, when we jump to end, NO tools execute (not even allowed ones) + # We only get error ToolMessages for the 2 blocked calls + tool_messages = [msg for msg in messages if isinstance(msg, ToolMessage)] + successful_tool_messages = [msg for msg in tool_messages if msg.status != "error"] + error_tool_messages = [msg for msg in tool_messages if msg.status == "error"] + + assert len(successful_tool_messages) == 0, "No tools execute when we jump to end" + assert len(error_tool_messages) == 2, "Should have 2 blocked tool messages (q2, q3)" + + # Verify error tool messages (sent to model - include "Do not" instruction) + for error_msg in error_tool_messages: + assert "Tool call limit exceeded" in error_msg.content + assert "Do not" in error_msg.content + + # Verify AI message explaining why execution stopped (displayed to user - includes thread/run details) + ai_limit_messages = [ + msg + for msg in messages + if isinstance(msg, AIMessage) and "limit" in msg.content.lower() and not msg.tool_calls + ] + assert len(ai_limit_messages) == 1, "Should have exactly one AI message explaining the limit" + + ai_msg_content = ai_limit_messages[0].content.lower() + assert "thread limit exceeded" in ai_msg_content or "run limit exceeded" in ai_msg_content, ( + "AI message should include thread/run limit details for the user" + ) + + +def test_parallel_mixed_tool_calls_with_specific_tool_limit(): + """Test parallel calls to different tools when limiting a specific tool. 
+ + When limiting 'search' to 1 call, and model proposes 3 search + 2 calculator calls: + - First search call should execute + - Other 2 search calls should be blocked + - All calculator calls should execute (not limited) + """ + + @tool + def search(query: str) -> str: + """Search for information.""" + return f"Search: {query}" + + @tool + def calculator(expression: str) -> str: + """Calculate an expression.""" + return f"Calc: {expression}" + + model = FakeToolCallingModel( + tool_calls=[ + [ + ToolCall(name="search", args={"query": "q1"}, id="1"), + ToolCall(name="calculator", args={"expression": "1+1"}, id="2"), + ToolCall(name="search", args={"query": "q2"}, id="3"), + ToolCall(name="calculator", args={"expression": "2+2"}, id="4"), + ToolCall(name="search", args={"query": "q3"}, id="5"), + ], + [], + ] + ) + + search_limiter = ToolCallLimitMiddleware( + tool_name="search", thread_limit=1, exit_behavior="continue" + ) + agent = create_agent( + model=model, + tools=[search, calculator], + middleware=[search_limiter], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Test")]}, {"configurable": {"thread_id": "test"}} + ) + messages = result["messages"] + + tool_messages = [msg for msg in messages if isinstance(msg, ToolMessage)] + search_success = [m for m in tool_messages if "Search:" in m.content] + search_blocked = [ + m for m in tool_messages if "limit" in m.content.lower() and "search" in m.content.lower() + ] + calc_success = [m for m in tool_messages if "Calc:" in m.content] + + assert len(search_success) == 1, "Should have 1 successful search call" + assert len(search_blocked) == 2, "Should have 2 blocked search calls" + assert len(calc_success) == 2, "All calculator calls should succeed (not limited)" diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_response_format.py b/libs/langchain_v1/tests/unit_tests/agents/test_response_format.py index a7963ced16f..7df5c23463b 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_response_format.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_response_format.py @@ -610,6 +610,35 @@ class TestResponseFormatAsToolStrategy: ) assert response["structured_response"] == EXPECTED_WEATHER_PYDANTIC + def test_validation_error_with_invalid_response(self) -> None: + """Test that StructuredOutputValidationError is raised when tool strategy receives invalid response.""" + tool_calls = [ + [ + { + "name": "WeatherBaseModel", + "id": "1", + "args": {"invalid_field": "wrong_data", "another_bad_field": 123}, + }, + ], + ] + + model = FakeToolCallingModel(tool_calls=tool_calls) + + agent = create_agent( + model, + [], + response_format=ToolStrategy( + WeatherBaseModel, + handle_errors=False, # Disable retry to ensure error is raised + ), + ) + + with pytest.raises( + StructuredOutputValidationError, + match=".*WeatherBaseModel.*", + ): + agent.invoke({"messages": [HumanMessage("What's the weather?")]}) + class TestResponseFormatAsProviderStrategy: def test_pydantic_model(self) -> None: @@ -630,6 +659,28 @@ class TestResponseFormatAsProviderStrategy: assert response["structured_response"] == EXPECTED_WEATHER_PYDANTIC assert len(response["messages"]) == 4 + def test_validation_error_with_invalid_response(self) -> None: + """Test that StructuredOutputValidationError is raised when provider strategy receives invalid response.""" + tool_calls = [ + [{"args": {}, "id": "1", "name": "get_weather"}], + ] + + # But we're using WeatherBaseModel which has different field requirements + model = 
FakeToolCallingModel[dict]( + tool_calls=tool_calls, + structured_response={"invalid": "data"}, # Wrong structure + ) + + agent = create_agent( + model, [get_weather], response_format=ProviderStrategy(WeatherBaseModel) + ) + + with pytest.raises( + StructuredOutputValidationError, + match=".*WeatherBaseModel.*", + ): + agent.invoke({"messages": [HumanMessage("What's the weather?")]}) + def test_dataclass(self) -> None: """Test response_format as ProviderStrategy with dataclass.""" tool_calls = [ diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_return_direct_graph.py b/libs/langchain_v1/tests/unit_tests/agents/test_return_direct_graph.py new file mode 100644 index 00000000000..6907aa01a8c --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_return_direct_graph.py @@ -0,0 +1,73 @@ +"""Tests for return_direct tool graph structure.""" + +from langchain_core.tools import tool +from syrupy.assertion import SnapshotAssertion + +from langchain.agents.factory import create_agent + +from .model import FakeToolCallingModel + + +def test_agent_graph_without_return_direct_tools(snapshot: SnapshotAssertion) -> None: + """Test that graph WITHOUT return_direct tools does NOT have edge from tools to end.""" + + @tool + def normal_tool(input_string: str) -> str: + """A normal tool without return_direct.""" + return input_string + + agent = create_agent( + model=FakeToolCallingModel(), + tools=[normal_tool], + system_prompt="You are a helpful assistant.", + ) + + # The mermaid diagram should NOT include an edge from tools to __end__ + # when no tools have return_direct=True + mermaid_diagram = agent.get_graph().draw_mermaid() + assert mermaid_diagram == snapshot + + +def test_agent_graph_with_return_direct_tool(snapshot: SnapshotAssertion) -> None: + """Test that graph WITH return_direct tools has correct edge from tools to end.""" + + @tool(return_direct=True) + def return_direct_tool(input_string: str) -> str: + """A tool with return_direct=True.""" + return input_string + + agent = create_agent( + model=FakeToolCallingModel(), + tools=[return_direct_tool], + system_prompt="You are a helpful assistant.", + ) + + # The mermaid diagram SHOULD include an edge from tools to __end__ + # when at least one tool has return_direct=True + mermaid_diagram = agent.get_graph().draw_mermaid() + assert mermaid_diagram == snapshot + + +def test_agent_graph_with_mixed_tools(snapshot: SnapshotAssertion) -> None: + """Test that graph with mixed tools (some return_direct, some not) has correct edges.""" + + @tool(return_direct=True) + def return_direct_tool(input_string: str) -> str: + """A tool with return_direct=True.""" + return input_string + + @tool + def normal_tool(input_string: str) -> str: + """A normal tool without return_direct.""" + return input_string + + agent = create_agent( + model=FakeToolCallingModel(), + tools=[return_direct_tool, normal_tool], + system_prompt="You are a helpful assistant.", + ) + + # The mermaid diagram SHOULD include an edge from tools to __end__ + # because at least one tool has return_direct=True + mermaid_diagram = agent.get_graph().draw_mermaid() + assert mermaid_diagram == snapshot diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_state_schema.py b/libs/langchain_v1/tests/unit_tests/agents/test_state_schema.py new file mode 100644 index 00000000000..d3962c21741 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_state_schema.py @@ -0,0 +1,188 @@ +"""Test state_schema parameter in create_agent. 
+ +This module tests that the state_schema parameter allows users to extend +AgentState without needing to create custom middleware. +""" + +from __future__ import annotations + +from typing import Any + +import pytest + +from langchain_core.messages import HumanMessage +from langchain_core.tools import tool + +from langchain.agents import create_agent +from langchain.agents.middleware.types import AgentMiddleware, AgentState +from langchain.tools import ToolRuntime + +from .model import FakeToolCallingModel + + +@tool +def simple_tool(x: int) -> str: + """Simple tool for basic tests.""" + return f"Result: {x}" + + +def test_state_schema_single_custom_field() -> None: + """Test that a single custom state field is preserved through agent execution.""" + + class CustomState(AgentState): + custom_field: str + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[[{"args": {"x": 1}, "id": "call_1", "name": "simple_tool"}], []] + ), + tools=[simple_tool], + state_schema=CustomState, + ) + + result = agent.invoke({"messages": [HumanMessage("Test")], "custom_field": "test_value"}) + + assert result["custom_field"] == "test_value" + assert len(result["messages"]) == 4 + + +def test_state_schema_multiple_custom_fields() -> None: + """Test that multiple custom state fields are preserved through agent execution.""" + + class CustomState(AgentState): + user_id: str + session_id: str + context: str + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[[{"args": {"x": 1}, "id": "call_1", "name": "simple_tool"}], []] + ), + tools=[simple_tool], + state_schema=CustomState, + ) + + result = agent.invoke( + { + "messages": [HumanMessage("Test")], + "user_id": "user_123", + "session_id": "session_456", + "context": "test_ctx", + } + ) + + assert result["user_id"] == "user_123" + assert result["session_id"] == "session_456" + assert result["context"] == "test_ctx" + assert len(result["messages"]) == 4 + + +def test_state_schema_with_tool_runtime() -> None: + """Test that custom state fields are accessible via ToolRuntime.""" + + class ExtendedState(AgentState): + counter: int + + runtime_data = {} + + @tool + def counter_tool(x: int, runtime: ToolRuntime) -> str: + """Tool that accesses custom state field.""" + runtime_data["counter"] = runtime.state["counter"] + return f"Counter is {runtime_data['counter']}, x is {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[[{"args": {"x": 10}, "id": "call_1", "name": "counter_tool"}], []] + ), + tools=[counter_tool], + state_schema=ExtendedState, + ) + + result = agent.invoke({"messages": [HumanMessage("Test")], "counter": 5}) + + assert runtime_data["counter"] == 5 + assert "Counter is 5" in result["messages"][2].content + + +def test_state_schema_with_middleware() -> None: + """Test that state_schema merges with middleware state schemas.""" + + class UserState(AgentState): + user_name: str + + class MiddlewareState(AgentState): + middleware_data: str + + middleware_calls = [] + + class TestMiddleware(AgentMiddleware): + state_schema = MiddlewareState + + def before_model(self, state, runtime) -> dict[str, Any]: + middleware_calls.append(state.get("middleware_data", "")) + return {} + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[[{"args": {"x": 5}, "id": "call_1", "name": "simple_tool"}], []] + ), + tools=[simple_tool], + state_schema=UserState, + middleware=[TestMiddleware()], + ) + + result = agent.invoke( + { + "messages": [HumanMessage("Test")], + "user_name": "Alice", + 
"middleware_data": "test_data", + } + ) + + assert result["user_name"] == "Alice" + assert result["middleware_data"] == "test_data" + assert "test_data" in middleware_calls + + +def test_state_schema_none_uses_default() -> None: + """Test that state_schema=None uses default AgentState.""" + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[[{"args": {"x": 1}, "id": "call_1", "name": "simple_tool"}], []] + ), + tools=[simple_tool], + state_schema=None, + ) + + result = agent.invoke({"messages": [HumanMessage("Test")]}) + + assert len(result["messages"]) == 4 + assert "Result: 1" in result["messages"][2].content + + +async def test_state_schema_async() -> None: + """Test that state_schema works with async agents.""" + + class AsyncState(AgentState): + async_field: str + + @tool + async def async_tool(x: int) -> str: + """Async tool.""" + return f"Async: {x}" + + agent = create_agent( + model=FakeToolCallingModel( + tool_calls=[[{"args": {"x": 99}, "id": "call_1", "name": "async_tool"}], []] + ), + tools=[async_tool], + state_schema=AsyncState, + ) + + result = await agent.ainvoke( + {"messages": [HumanMessage("Test async")], "async_field": "async_value"} + ) + + assert result["async_field"] == "async_value" + assert "Async: 99" in result["messages"][2].content diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_sync_async_tool_wrapper_composition.py b/libs/langchain_v1/tests/unit_tests/agents/test_sync_async_tool_wrapper_composition.py new file mode 100644 index 00000000000..c565719d3f2 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_sync_async_tool_wrapper_composition.py @@ -0,0 +1,427 @@ +"""Tests for sync/async middleware composition with wrap_tool_call and awrap_tool_call. + +These tests verify the desired behavior: +1. If middleware defines both sync and async -> use both on respective paths +2. If middleware defines only sync -> use on sync path, raise NotImplementedError on async path +3. 
If middleware defines only async -> use on async path, raise NotImplementedError on sync path +""" + +import pytest +from langchain_core.messages import HumanMessage, ToolCall, ToolMessage +from langchain_core.tools import tool +from langgraph.checkpoint.memory import InMemorySaver + +from langchain.agents.factory import create_agent +from langchain.agents.middleware.types import AgentMiddleware, wrap_tool_call +from langchain.agents.middleware.types import ToolCallRequest +from tests.unit_tests.agents.test_middleware_agent import FakeToolCallingModel + + +@tool +def search(query: str) -> str: + """Search for information.""" + return f"Results for: {query}" + + +@tool +def calculator(expression: str) -> str: + """Calculate an expression.""" + return f"Calculated: {expression}" + + +class TestSyncAsyncMiddlewareComposition: + """Test sync/async middleware composition behavior.""" + + def test_sync_only_middleware_works_on_sync_path(self) -> None: + """Middleware with only sync wrap_tool_call works on sync path.""" + call_log = [] + + class SyncOnlyMiddleware(AgentMiddleware): + def wrap_tool_call(self, request, handler): + call_log.append("sync_called") + return handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[SyncOnlyMiddleware()], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test"}}, + ) + + assert "sync_called" in call_log + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + assert "Results for: test" in tool_messages[0].content + + async def test_sync_only_middleware_raises_on_async_path(self) -> None: + """Middleware with only sync wrap_tool_call raises NotImplementedError on async path.""" + + class SyncOnlyMiddleware(AgentMiddleware): + def wrap_tool_call(self, request, handler): + return handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[SyncOnlyMiddleware()], + checkpointer=InMemorySaver(), + ) + + # Should raise NotImplementedError because SyncOnlyMiddleware doesn't support async path + with pytest.raises(NotImplementedError): + await agent.ainvoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test"}}, + ) + + async def test_async_only_middleware_works_on_async_path(self) -> None: + """Middleware with only async awrap_tool_call works on async path.""" + call_log = [] + + class AsyncOnlyMiddleware(AgentMiddleware): + async def awrap_tool_call(self, request, handler): + call_log.append("async_called") + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[AsyncOnlyMiddleware()], + checkpointer=InMemorySaver(), + ) + + result = await agent.ainvoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test"}}, + ) + + assert "async_called" in call_log + tool_messages = [m for m in result["messages"] if isinstance(m, ToolMessage)] + assert len(tool_messages) == 1 + assert "Results for: test" in tool_messages[0].content + + def test_async_only_middleware_raises_on_sync_path(self) -> None: 
+ """Middleware with only async awrap_tool_call raises NotImplementedError on sync path.""" + + class AsyncOnlyMiddleware(AgentMiddleware): + async def awrap_tool_call(self, request, handler): + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[AsyncOnlyMiddleware()], + checkpointer=InMemorySaver(), + ) + + with pytest.raises(NotImplementedError): + agent.invoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test"}}, + ) + + def test_both_sync_and_async_middleware_uses_appropriate_path(self) -> None: + """Middleware with both sync and async uses correct implementation per path.""" + call_log = [] + + class BothSyncAsyncMiddleware(AgentMiddleware): + def wrap_tool_call(self, request, handler): + call_log.append("sync_called") + return handler(request) + + async def awrap_tool_call(self, request, handler): + call_log.append("async_called") + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[BothSyncAsyncMiddleware()], + checkpointer=InMemorySaver(), + ) + + # Sync path + call_log.clear() + result_sync = agent.invoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test1"}}, + ) + assert "sync_called" in call_log + assert "async_called" not in call_log + + async def test_both_sync_and_async_middleware_uses_appropriate_path_async( + self, + ) -> None: + """Middleware with both sync and async uses correct implementation per path (async).""" + call_log = [] + + class BothSyncAsyncMiddleware(AgentMiddleware): + def wrap_tool_call(self, request, handler): + call_log.append("sync_called") + return handler(request) + + async def awrap_tool_call(self, request, handler): + call_log.append("async_called") + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[BothSyncAsyncMiddleware()], + checkpointer=InMemorySaver(), + ) + + # Async path + call_log.clear() + result_async = await agent.ainvoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test2"}}, + ) + assert "async_called" in call_log + assert "sync_called" not in call_log + + async def test_mixed_middleware_composition_async_path_fails_with_sync_only( + self, + ) -> None: + """Multiple middleware on async path fails if any are sync-only.""" + + class SyncOnlyMiddleware(AgentMiddleware): + name = "SyncOnly" + + def wrap_tool_call(self, request, handler): + return handler(request) + + class AsyncOnlyMiddleware(AgentMiddleware): + name = "AsyncOnly" + + async def awrap_tool_call(self, request, handler): + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[ + SyncOnlyMiddleware(), + AsyncOnlyMiddleware(), + ], + checkpointer=InMemorySaver(), + ) + + # Should raise NotImplementedError because SyncOnlyMiddleware can't run on async path + with pytest.raises(NotImplementedError): + await agent.ainvoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": 
"test"}}, + ) + + def test_mixed_middleware_composition_sync_path_with_async_only_fails(self) -> None: + """Multiple middleware on sync path fails if any are async-only.""" + + class SyncOnlyMiddleware(AgentMiddleware): + name = "SyncOnly" + + def wrap_tool_call(self, request, handler): + return handler(request) + + class AsyncOnlyMiddleware(AgentMiddleware): + name = "AsyncOnly" + + async def awrap_tool_call(self, request, handler): + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[ + SyncOnlyMiddleware(), + AsyncOnlyMiddleware(), # This will break sync path + ], + checkpointer=InMemorySaver(), + ) + + # Should raise NotImplementedError because AsyncOnlyMiddleware can't run on sync path + with pytest.raises(NotImplementedError): + agent.invoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test"}}, + ) + + def test_decorator_sync_only_works_both_paths(self) -> None: + """Decorator-created sync-only middleware works on both paths.""" + call_log = [] + + @wrap_tool_call + def my_wrapper(request: ToolCallRequest, handler): + call_log.append("decorator_sync") + return handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[my_wrapper], + checkpointer=InMemorySaver(), + ) + + # Sync path + call_log.clear() + result = agent.invoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test1"}}, + ) + assert "decorator_sync" in call_log + assert len([m for m in result["messages"] if isinstance(m, ToolMessage)]) == 1 + + async def test_decorator_sync_only_raises_on_async_path(self) -> None: + """Decorator-created sync-only middleware raises on async path.""" + call_log = [] + + @wrap_tool_call + def my_wrapper(request: ToolCallRequest, handler): + call_log.append("decorator_sync") + return handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[my_wrapper], + checkpointer=InMemorySaver(), + ) + + # Should raise NotImplementedError because sync-only decorator doesn't support async path + with pytest.raises(NotImplementedError): + await agent.ainvoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test2"}}, + ) + + async def test_decorator_async_only_works_async_path(self) -> None: + """Decorator-created async-only middleware works on async path.""" + call_log = [] + + @wrap_tool_call + async def my_async_wrapper(request: ToolCallRequest, handler): + call_log.append("decorator_async") + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[my_async_wrapper], + checkpointer=InMemorySaver(), + ) + + result = await agent.ainvoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test"}}, + ) + assert "decorator_async" in call_log + assert len([m for m in result["messages"] if isinstance(m, ToolMessage)]) == 1 + + def test_decorator_async_only_raises_on_sync_path(self) -> None: + """Decorator-created async-only middleware raises on sync 
path.""" + + @wrap_tool_call + async def my_async_wrapper(request: ToolCallRequest, handler): + return await handler(request) + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "test"}, id="1")], + [], + ] + ) + + agent = create_agent( + model=model, + tools=[search], + middleware=[my_async_wrapper], + checkpointer=InMemorySaver(), + ) + + with pytest.raises(NotImplementedError): + agent.invoke( + {"messages": [HumanMessage("Search")]}, + {"configurable": {"thread_id": "test"}}, + ) diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_todo_middleware.py b/libs/langchain_v1/tests/unit_tests/agents/test_todo_middleware.py new file mode 100644 index 00000000000..bded1390f79 --- /dev/null +++ b/libs/langchain_v1/tests/unit_tests/agents/test_todo_middleware.py @@ -0,0 +1,300 @@ +"""Unit tests for TodoListMiddleware.""" + +from __future__ import annotations + +from typing import cast + +from langchain_core.language_models.fake_chat_models import GenericFakeChatModel +from langchain_core.messages import AIMessage, SystemMessage + +from langchain.agents.middleware.todo import TodoListMiddleware +from langchain.agents.middleware.types import ModelRequest, ModelResponse +from langgraph.runtime import Runtime + + +def _fake_runtime() -> Runtime: + return cast(Runtime, object()) + + +def _make_request(system_prompt: str | None = None) -> ModelRequest: + """Create a minimal ModelRequest for testing.""" + model = GenericFakeChatModel(messages=iter([AIMessage(content="response")])) + return ModelRequest( + model=model, + system_prompt=system_prompt, + messages=[], + tool_choice=None, + tools=[], + response_format=None, + state=cast("AgentState", {}), # type: ignore[name-defined] + runtime=_fake_runtime(), + model_settings={}, + ) + + +def test_adds_system_prompt_when_none_exists() -> None: + """Test that middleware adds system prompt when request has none.""" + middleware = TodoListMiddleware() + request = _make_request(system_prompt=None) + + def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + middleware.wrap_model_call(request, mock_handler) + + # System prompt should be set + assert request.system_prompt is not None + assert "write_todos" in request.system_prompt + + +def test_appends_to_existing_system_prompt() -> None: + """Test that middleware appends to existing system prompt.""" + existing_prompt = "You are a helpful assistant." 
+ middleware = TodoListMiddleware() + request = _make_request(system_prompt=existing_prompt) + + def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + middleware.wrap_model_call(request, mock_handler) + + # System prompt should contain both + assert request.system_prompt is not None + assert existing_prompt in request.system_prompt + assert "write_todos" in request.system_prompt + assert request.system_prompt.startswith(existing_prompt) + + +def test_custom_system_prompt() -> None: + """Test that middleware uses custom system prompt.""" + custom_prompt = "Custom planning instructions" + middleware = TodoListMiddleware(system_prompt=custom_prompt) + request = _make_request(system_prompt=None) + + def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + middleware.wrap_model_call(request, mock_handler) + + # Should use custom prompt + assert request.system_prompt == custom_prompt + + +def test_has_write_todos_tool() -> None: + """Test that middleware registers the write_todos tool.""" + middleware = TodoListMiddleware() + + # Should have one tool registered + assert len(middleware.tools) == 1 + assert middleware.tools[0].name == "write_todos" + + +def test_custom_tool_description() -> None: + """Test that middleware uses custom tool description.""" + custom_description = "Custom todo tool description" + middleware = TodoListMiddleware(tool_description=custom_description) + + # Tool should use custom description + assert len(middleware.tools) == 1 + assert middleware.tools[0].description == custom_description + + +# ============================================================================== +# Async Tests +# ============================================================================== + + +async def test_adds_system_prompt_when_none_exists_async() -> None: + """Test async version - middleware adds system prompt when request has none.""" + middleware = TodoListMiddleware() + request = _make_request(system_prompt=None) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + await middleware.awrap_model_call(request, mock_handler) + + # System prompt should be set + assert request.system_prompt is not None + assert "write_todos" in request.system_prompt + + +async def test_appends_to_existing_system_prompt_async() -> None: + """Test async version - middleware appends to existing system prompt.""" + existing_prompt = "You are a helpful assistant." 
+ middleware = TodoListMiddleware() + request = _make_request(system_prompt=existing_prompt) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + await middleware.awrap_model_call(request, mock_handler) + + # System prompt should contain both + assert request.system_prompt is not None + assert existing_prompt in request.system_prompt + assert "write_todos" in request.system_prompt + assert request.system_prompt.startswith(existing_prompt) + + +async def test_custom_system_prompt_async() -> None: + """Test async version - middleware uses custom system prompt.""" + custom_prompt = "Custom planning instructions" + middleware = TodoListMiddleware(system_prompt=custom_prompt) + request = _make_request(system_prompt=None) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + await middleware.awrap_model_call(request, mock_handler) + + # Should use custom prompt + assert request.system_prompt == custom_prompt + + +async def test_handler_called_with_modified_request_async() -> None: + """Test async version - handler receives the modified request.""" + middleware = TodoListMiddleware() + request = _make_request(system_prompt="Original") + handler_called = {"value": False} + received_prompt = {"value": None} + + async def mock_handler(req: ModelRequest) -> ModelResponse: + handler_called["value"] = True + received_prompt["value"] = req.system_prompt + return ModelResponse(result=[AIMessage(content="response")]) + + await middleware.awrap_model_call(request, mock_handler) + + assert handler_called["value"] + assert received_prompt["value"] is not None + assert "Original" in received_prompt["value"] + assert "write_todos" in received_prompt["value"] + + +# ============================================================================== +# SystemMessage Tests +# ============================================================================== + + +def test_appends_to_system_message_with_list_content() -> None: + """Test that middleware appends to SystemMessage with list content.""" + existing_prompt = SystemMessage(content=["You are helpful.", "Be concise."]) + middleware = TodoListMiddleware() + + model = GenericFakeChatModel(messages=iter([AIMessage(content="response")])) + request = ModelRequest( + model=model, + system_prompt=existing_prompt, + messages=[], + tool_choice=None, + tools=[], + response_format=None, + state=cast("AgentState", {}), # type: ignore[name-defined] + runtime=_fake_runtime(), + model_settings={}, + ) + + def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + middleware.wrap_model_call(request, mock_handler) + + # System prompt should be a SystemMessage with combined content + assert isinstance(request.system_prompt, SystemMessage) + assert isinstance(request.system_prompt.content, list) + assert len(request.system_prompt.content) == 3 + assert request.system_prompt.content[0] == "You are helpful." + assert request.system_prompt.content[1] == "Be concise." 
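# What the asserts immediately around this point pin down for list-content
# SystemMessage prompts: the original content blocks survive unchanged and in
# their original order, and the middleware's planning instructions arrive as a
# single extra trailing block. Only the presence of "write_todos" in that
# trailing block is asserted, not its exact wording.
#
#     before: SystemMessage(content=["You are helpful.", "Be concise."])
#     after:  SystemMessage(content=["You are helpful.", "Be concise.", "<todo instructions mentioning write_todos>"])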
+ assert "write_todos" in request.system_prompt.content[2] + + +async def test_appends_to_system_message_with_string_content_async() -> None: + """Test async version - middleware appends to SystemMessage with string content.""" + existing_prompt = SystemMessage(content="You are a helpful assistant.") + middleware = TodoListMiddleware() + + model = GenericFakeChatModel(messages=iter([AIMessage(content="response")])) + request = ModelRequest( + model=model, + system_prompt=existing_prompt, + messages=[], + tool_choice=None, + tools=[], + response_format=None, + state=cast("AgentState", {}), # type: ignore[name-defined] + runtime=_fake_runtime(), + model_settings={}, + ) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + await middleware.awrap_model_call(request, mock_handler) + + # System prompt should be a SystemMessage with combined content + assert isinstance(request.system_prompt, SystemMessage) + assert isinstance(request.system_prompt.content, str) + assert "You are a helpful assistant." in request.system_prompt.content + assert middleware.system_prompt in request.system_prompt.content + + +async def test_appends_to_system_message_with_list_content_async() -> None: + """Test async version - middleware appends to SystemMessage with list content.""" + existing_prompt = SystemMessage(content=["You are helpful.", "Be concise."]) + middleware = TodoListMiddleware() + + model = GenericFakeChatModel(messages=iter([AIMessage(content="response")])) + request = ModelRequest( + model=model, + system_prompt=existing_prompt, + messages=[], + tool_choice=None, + tools=[], + response_format=None, + state=cast("AgentState", {}), # type: ignore[name-defined] + runtime=_fake_runtime(), + model_settings={}, + ) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + await middleware.awrap_model_call(request, mock_handler) + + # System prompt should be a SystemMessage with combined content + assert isinstance(request.system_prompt, SystemMessage) + assert isinstance(request.system_prompt.content, list) + assert len(request.system_prompt.content) == 3 + assert request.system_prompt.content[0] == "You are helpful." + assert request.system_prompt.content[1] == "Be concise." 
+ assert "write_todos" in request.system_prompt.content[2] + + +def test_creates_system_message_when_prompt_is_system_message() -> None: + """Test that middleware preserves SystemMessage type.""" + existing_prompt = SystemMessage(content="Original instructions") + middleware = TodoListMiddleware(system_prompt="Todo instructions") + + model = GenericFakeChatModel(messages=iter([AIMessage(content="response")])) + request = ModelRequest( + model=model, + system_prompt=existing_prompt, + messages=[], + tool_choice=None, + tools=[], + response_format=None, + state=cast("AgentState", {}), # type: ignore[name-defined] + runtime=_fake_runtime(), + model_settings={}, + ) + + def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="response")]) + + result = middleware.wrap_model_call(request, mock_handler) + + # Result should be returned from handler + assert isinstance(result, ModelResponse) + # System prompt should still be a SystemMessage + assert isinstance(request.system_prompt, SystemMessage) diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_tool_call_limit.py b/libs/langchain_v1/tests/unit_tests/agents/test_tool_call_limit.py index d38849a040f..d382eb885d9 100644 --- a/libs/langchain_v1/tests/unit_tests/agents/test_tool_call_limit.py +++ b/libs/langchain_v1/tests/unit_tests/agents/test_tool_call_limit.py @@ -9,103 +9,21 @@ from langchain.agents.factory import create_agent from langchain.agents.middleware.tool_call_limit import ( ToolCallLimitExceededError, ToolCallLimitMiddleware, - _count_tool_calls_in_messages, - _get_run_messages, ) from tests.unit_tests.agents.test_middleware_agent import FakeToolCallingModel -def test_count_tool_calls_in_messages(): - """Test counting tool calls in messages.""" - # Test with no messages - assert _count_tool_calls_in_messages([]) == 0 - - # Test with messages but no tool calls - messages = [ - HumanMessage("Hello"), - AIMessage("Hi there"), - ] - assert _count_tool_calls_in_messages(messages) == 0 - - # Test with tool calls - messages = [ - HumanMessage("Search for something"), - AIMessage( - "Searching...", - tool_calls=[ - {"name": "search", "args": {"query": "test"}, "id": "1"}, - {"name": "calculator", "args": {"expression": "1+1"}, "id": "2"}, - ], - ), - ToolMessage("Result 1", tool_call_id="1"), - ToolMessage("Result 2", tool_call_id="2"), - AIMessage( - "More searching...", - tool_calls=[ - {"name": "search", "args": {"query": "another"}, "id": "3"}, - ], - ), - ] - # Total: 3 tool calls (2 from first AI message, 1 from second) - assert _count_tool_calls_in_messages(messages) == 3 - - # Test filtering by tool name - assert _count_tool_calls_in_messages(messages, tool_name="search") == 2 - assert _count_tool_calls_in_messages(messages, tool_name="calculator") == 1 - assert _count_tool_calls_in_messages(messages, tool_name="nonexistent") == 0 - - -def test_get_run_messages(): - """Test extracting run messages after last HumanMessage.""" - # Test with no messages - assert _get_run_messages([]) == [] - - # Test with no HumanMessage - messages = [ - AIMessage("Hello"), - AIMessage("World"), - ] - assert _get_run_messages(messages) == messages - - # Test with HumanMessage at the end - messages = [ - AIMessage("Previous"), - HumanMessage("New question"), - ] - assert _get_run_messages(messages) == [] - - # Test with messages after HumanMessage - messages = [ - AIMessage("Previous"), - ToolMessage("Previous result", tool_call_id="0"), - HumanMessage("New question"), - AIMessage( - "Response", - 
tool_calls=[{"name": "search", "args": {}, "id": "1"}], - ), - ToolMessage("Result", tool_call_id="1"), - ] - run_messages = _get_run_messages(messages) - assert len(run_messages) == 2 - assert isinstance(run_messages[0], AIMessage) - assert isinstance(run_messages[1], ToolMessage) - - def test_middleware_initialization_validation(): """Test that middleware initialization validates parameters correctly.""" # Test that at least one limit must be specified with pytest.raises(ValueError, match="At least one limit must be specified"): ToolCallLimitMiddleware() - # Test invalid exit behavior - with pytest.raises(ValueError, match="Invalid exit_behavior"): - ToolCallLimitMiddleware(thread_limit=5, exit_behavior="invalid") # type: ignore[arg-type] - - # Test valid initialization + # Test valid initialization with both limits middleware = ToolCallLimitMiddleware(thread_limit=5, run_limit=3) assert middleware.thread_limit == 5 assert middleware.run_limit == 3 - assert middleware.exit_behavior == "end" + assert middleware.exit_behavior == "continue" assert middleware.tool_name is None # Test with tool name @@ -114,6 +32,32 @@ def test_middleware_initialization_validation(): assert middleware.thread_limit == 5 assert middleware.run_limit is None + # Test exit behaviors + for behavior in ["error", "end", "continue"]: + middleware = ToolCallLimitMiddleware(thread_limit=5, exit_behavior=behavior) + assert middleware.exit_behavior == behavior + + # Test invalid exit behavior + with pytest.raises(ValueError, match="Invalid exit_behavior"): + ToolCallLimitMiddleware(thread_limit=5, exit_behavior="invalid") # type: ignore[arg-type] + + # Test run_limit exceeding thread_limit + with pytest.raises( + ValueError, + match="run_limit .* cannot exceed thread_limit", + ): + ToolCallLimitMiddleware(thread_limit=3, run_limit=5) + + # Test run_limit equal to thread_limit (should be valid) + middleware = ToolCallLimitMiddleware(thread_limit=5, run_limit=5) + assert middleware.thread_limit == 5 + assert middleware.run_limit == 5 + + # Test run_limit less than thread_limit (should be valid) + middleware = ToolCallLimitMiddleware(thread_limit=5, run_limit=3) + assert middleware.thread_limit == 5 + assert middleware.run_limit == 3 + def test_middleware_name_property(): """Test that the name property includes tool name when specified.""" @@ -134,57 +78,98 @@ def test_middleware_name_property(): def test_middleware_unit_functionality(): - """Test that the middleware works as expected in isolation.""" - # Test with end behavior - global tool limit - middleware = ToolCallLimitMiddleware(thread_limit=3, run_limit=2) + """Test that the middleware works as expected in isolation. - # Mock runtime (not used in current implementation) + Tests basic count tracking, thread limit, run limit, and limit-not-exceeded cases. 
+ """ + middleware = ToolCallLimitMiddleware(thread_limit=3, run_limit=2, exit_behavior="end") runtime = None - # Test when limits are not exceeded + # Test when limits are not exceeded - counts should increment normally state = { "messages": [ HumanMessage("Question"), - AIMessage( - "Response", - tool_calls=[{"name": "search", "args": {}, "id": "1"}], - ), - ] + AIMessage("Response", tool_calls=[{"name": "search", "args": {}, "id": "1"}]), + ], + "thread_tool_call_count": {}, + "run_tool_call_count": {}, } - result = middleware.before_model(state, runtime) # type: ignore[arg-type] - assert result is None + result = middleware.after_model(state, runtime) # type: ignore[arg-type] + assert result is not None + assert result["thread_tool_call_count"] == {"__all__": 1} + assert result["run_tool_call_count"] == {"__all__": 1} + assert "jump_to" not in result - # Test when thread limit is exceeded + # Test thread limit exceeded (start at thread_limit so next call will exceed) state = { "messages": [ - HumanMessage("Question 1"), - AIMessage( - "Response 1", - tool_calls=[ - {"name": "search", "args": {}, "id": "1"}, - {"name": "calculator", "args": {}, "id": "2"}, - ], - ), - ToolMessage("Result 1", tool_call_id="1"), - ToolMessage("Result 2", tool_call_id="2"), HumanMessage("Question 2"), - AIMessage( - "Response 2", - tool_calls=[{"name": "search", "args": {}, "id": "3"}], - ), - ] + AIMessage("Response 2", tool_calls=[{"name": "search", "args": {}, "id": "3"}]), + ], + "thread_tool_call_count": {"__all__": 3}, # Already exceeds thread_limit=3 + "run_tool_call_count": {"__all__": 0}, # No calls yet } - result = middleware.before_model(state, runtime) # type: ignore[arg-type] + result = middleware.after_model(state, runtime) # type: ignore[arg-type] assert result is not None assert result["jump_to"] == "end" - assert "messages" in result - assert len(result["messages"]) == 1 - assert "thread limit (3/3)" in result["messages"][0].content + # Check the ToolMessage (sent to model - no thread/run details) + tool_msg = result["messages"][0] + assert isinstance(tool_msg, ToolMessage) + assert tool_msg.status == "error" + assert "Tool call limit exceeded" in tool_msg.content + # Should include "Do not" instruction + assert "Do not" in tool_msg.content, ( + "Tool message should include 'Do not' instruction when limit exceeded" + ) + # Check the final AI message (displayed to user - includes thread/run details) + final_ai_msg = result["messages"][-1] + assert isinstance(final_ai_msg, AIMessage) + assert "limit" in final_ai_msg.content.lower() + assert "thread limit exceeded" in final_ai_msg.content.lower() + # Thread count stays at 3 (blocked call not counted) + assert result["thread_tool_call_count"] == {"__all__": 3} + # Run count goes to 1 (includes blocked call) + assert result["run_tool_call_count"] == {"__all__": 1} - # Test when run limit is exceeded + # Test run limit exceeded (thread count must be >= run count) state = { "messages": [ HumanMessage("Question"), + AIMessage("Response", tool_calls=[{"name": "search", "args": {}, "id": "1"}]), + ], + "thread_tool_call_count": {"__all__": 2}, + "run_tool_call_count": {"__all__": 2}, + } + result = middleware.after_model(state, runtime) # type: ignore[arg-type] + assert result is not None + assert result["jump_to"] == "end" + # Check the final AI message includes run limit details + final_ai_msg = result["messages"][-1] + assert "run limit exceeded" in final_ai_msg.content + assert "3/2 calls" in final_ai_msg.content + # Check the tool message (sent 
to model) - should always include "Do not" instruction + tool_msg = result["messages"][0] + assert isinstance(tool_msg, ToolMessage) + assert "Tool call limit exceeded" in tool_msg.content + assert "Do not" in tool_msg.content, ( + "Tool message should include 'Do not' instruction for both run and thread limits" + ) + + +def test_middleware_end_behavior_with_unrelated_parallel_tool_calls(): + """Test that 'end' behavior raises NotImplementedError when there are parallel calls to unrelated tools. + + When limiting a specific tool with "end" behavior and the model proposes parallel calls + to BOTH the limited tool AND other tools, we can't handle this scenario (we'd be stopping + execution while other tools should run). + """ + # Limit search tool specifically + middleware = ToolCallLimitMiddleware(tool_name="search", thread_limit=1, exit_behavior="end") + runtime = None + + # Test with search + calculator calls when search exceeds limit + state = { + "messages": [ AIMessage( "Response", tool_calls=[ @@ -192,110 +177,89 @@ def test_middleware_unit_functionality(): {"name": "calculator", "args": {}, "id": "2"}, ], ), - ] + ], + "thread_tool_call_count": {"search": 1}, + "run_tool_call_count": {"search": 1}, } - result = middleware.before_model(state, runtime) # type: ignore[arg-type] - assert result is not None - assert result["jump_to"] == "end" - assert "run limit (2/2)" in result["messages"][0].content + + with pytest.raises( + NotImplementedError, match="Cannot end execution with other tool calls pending" + ): + middleware.after_model(state, runtime) # type: ignore[arg-type] def test_middleware_with_specific_tool(): - """Test middleware that limits a specific tool.""" - # Limit only the "search" tool - middleware = ToolCallLimitMiddleware(tool_name="search", thread_limit=2, run_limit=1) - + """Test middleware that limits a specific tool while ignoring others.""" + middleware = ToolCallLimitMiddleware( + tool_name="search", thread_limit=2, run_limit=1, exit_behavior="end" + ) runtime = None - # Test with search tool calls + # Test search tool exceeding run limit state = { "messages": [ - HumanMessage("Question"), - AIMessage( - "Response", - tool_calls=[ - {"name": "search", "args": {}, "id": "1"}, - {"name": "calculator", "args": {}, "id": "2"}, - ], - ), - ] + AIMessage("Response 2", tool_calls=[{"name": "search", "args": {}, "id": "3"}]), + ], + "thread_tool_call_count": {"search": 1}, + "run_tool_call_count": {"search": 1}, } - # Run limit for search is 1, should be exceeded - result = middleware.before_model(state, runtime) # type: ignore[arg-type] + result = middleware.after_model(state, runtime) # type: ignore[arg-type] assert result is not None - assert "'search' tool call" in result["messages"][0].content - assert "run limit (1/1)" in result["messages"][0].content + assert result["jump_to"] == "end" + assert "search" in result["messages"][0].content.lower() - # Test with only calculator calls (should not trigger limit) + # Test calculator tool - should be ignored by search-specific middleware state = { "messages": [ - HumanMessage("Question"), - AIMessage( - "Response", - tool_calls=[ - {"name": "calculator", "args": {}, "id": "1"}, - {"name": "calculator", "args": {}, "id": "2"}, - ], - ), - ] + AIMessage("Response", tool_calls=[{"name": "calculator", "args": {}, "id": "1"}] * 10), + ], + "thread_tool_call_count": {}, + "run_tool_call_count": {}, } - result = middleware.before_model(state, runtime) # type: ignore[arg-type] - assert result is None + result = 
middleware.after_model(state, runtime) # type: ignore[arg-type] + assert result is None, "Calculator calls shouldn't be counted by search-specific middleware" def test_middleware_error_behavior(): - """Test middleware with error exit behavior.""" - middleware = ToolCallLimitMiddleware(thread_limit=2, run_limit=1, exit_behavior="error") - + """Test that middleware raises ToolCallLimitExceededError when configured with exit_behavior='error'.""" + middleware = ToolCallLimitMiddleware(thread_limit=2, exit_behavior="error") runtime = None - # Test exception when thread limit exceeded state = { - "messages": [ - HumanMessage("Q"), - AIMessage("R", tool_calls=[{"name": "search", "args": {}, "id": "1"}]), - ToolMessage("Result", tool_call_id="1"), - AIMessage("R", tool_calls=[{"name": "search", "args": {}, "id": "2"}]), - ] + "messages": [AIMessage("Response", tool_calls=[{"name": "search", "args": {}, "id": "1"}])], + "thread_tool_call_count": {"__all__": 2}, + "run_tool_call_count": {"__all__": 2}, } + with pytest.raises(ToolCallLimitExceededError) as exc_info: - middleware.before_model(state, runtime) # type: ignore[arg-type] + middleware.after_model(state, runtime) # type: ignore[arg-type] - assert "thread limit (2/2)" in str(exc_info.value) - assert exc_info.value.thread_count == 2 - assert exc_info.value.thread_limit == 2 - - # Test exception when run limit exceeded - state = { - "messages": [ - HumanMessage("Question"), - AIMessage("R", tool_calls=[{"name": "search", "args": {}, "id": "1"}]), - ] - } - with pytest.raises(ToolCallLimitExceededError) as exc_info: - middleware.before_model(state, runtime) # type: ignore[arg-type] - - assert "run limit (1/1)" in str(exc_info.value) + error = exc_info.value + # Thread count in error message shows hypothetical count (what it would have been) + assert error.thread_count == 3 + assert error.thread_limit == 2 + # Run count includes the blocked call + assert error.run_count == 3 + assert error.tool_name is None def test_multiple_middleware_instances(): - """Test using multiple ToolCallLimitMiddleware instances in the same agent.""" + """Test that multiple middleware instances can coexist and track independently.""" @tool def search(query: str) -> str: """Search for information.""" - return f"Results for: {query}" + return f"Results for {query}" @tool def calculator(expression: str) -> str: """Calculate an expression.""" return f"Result: {expression}" - # Create model that makes multiple tool calls model = FakeToolCallingModel( tool_calls=[ [ - ToolCall(name="search", args={"query": "test1"}, id="1"), + ToolCall(name="search", args={"query": "test"}, id="1"), ToolCall(name="calculator", args={"expression": "1+1"}, id="2"), ], [ @@ -309,126 +273,335 @@ def test_multiple_middleware_instances(): ] ) - # Global limit: max 5 tool calls per thread - # Search-specific limit: max 2 search calls per thread - global_limiter = ToolCallLimitMiddleware(thread_limit=5) - search_limiter = ToolCallLimitMiddleware(tool_name="search", thread_limit=2) + # Create two middleware instances - one for each tool + search_limiter = ToolCallLimitMiddleware( + tool_name="search", thread_limit=2, exit_behavior="end" + ) + calc_limiter = ToolCallLimitMiddleware( + tool_name="calculator", thread_limit=2, exit_behavior="end" + ) agent = create_agent( model=model, tools=[search, calculator], - middleware=[global_limiter, search_limiter], + middleware=[search_limiter, calc_limiter], checkpointer=InMemorySaver(), ) - # First invocation: 2 search calls, 2 calculator calls (4 total) - # 
Should succeed - within both limits result = agent.invoke( - {"messages": [HumanMessage("First question")]}, - {"configurable": {"thread_id": "test_thread"}}, - ) - assert len(result["messages"]) > 0 - - # Second invocation would add 1 more search call (total 3), exceeding search limit - # Should hit the search-specific limit - result = agent.invoke( - {"messages": [HumanMessage("Second question")]}, + {"messages": [HumanMessage("Question")]}, {"configurable": {"thread_id": "test_thread"}}, ) - # Check that the limit was hit - last_message = result["messages"][-1] - assert isinstance(last_message, AIMessage) - assert "'search' tool call" in last_message.content - assert "thread limit" in last_message.content + # The agent should stop after the second iteration + # because search will hit its limit (3 calls > 2 limit) + ai_limit_messages = [ + msg + for msg in result["messages"] + if isinstance(msg, AIMessage) and "limit" in msg.content.lower() + ] + assert len(ai_limit_messages) > 0, "Should have AI message explaining limit was exceeded" def test_run_limit_with_multiple_human_messages(): - """Test that run limit correctly resets after each HumanMessage.""" + """Test that run limits reset between invocations. + + Verifies that when using run_limit, the count resets for each new user message, + allowing execution to continue across multiple invocations in the same thread. + """ @tool def search(query: str) -> str: """Search for information.""" - return f"Results for: {query}" + return f"Results for {query}" - # Model makes 2 tool calls, then 1 tool call, then no tool calls model = FakeToolCallingModel( tool_calls=[ - [ - ToolCall(name="search", args={"query": "test1"}, id="1"), - ToolCall(name="search", args={"query": "test2"}, id="2"), - ], - [ToolCall(name="search", args={"query": "test3"}, id="3")], + [ToolCall(name="search", args={"query": "test1"}, id="1")], + [ToolCall(name="search", args={"query": "test2"}, id="2")], [], ] ) - # Run limit of 2 tool calls per run - middleware = ToolCallLimitMiddleware(run_limit=2) - + middleware = ToolCallLimitMiddleware(run_limit=1, exit_behavior="end") agent = create_agent( model=model, tools=[search], middleware=[middleware], checkpointer=InMemorySaver() ) - thread_config = {"configurable": {"thread_id": "test_thread"}} + # First invocation: test1 executes successfully, test2 exceeds limit + result1 = agent.invoke( + {"messages": [HumanMessage("Question 1")]}, + {"configurable": {"thread_id": "test_thread"}}, + ) + tool_messages = [msg for msg in result1["messages"] if isinstance(msg, ToolMessage)] + successful_tool_msgs = [msg for msg in tool_messages if msg.status != "error"] + error_tool_msgs = [msg for msg in tool_messages if msg.status == "error"] + ai_limit_msgs = [ + msg + for msg in result1["messages"] + if isinstance(msg, AIMessage) and "limit" in msg.content.lower() and not msg.tool_calls + ] - # First run: 2 tool calls, should hit run limit - result = agent.invoke({"messages": [HumanMessage("Question 1")]}, thread_config) - last_message = result["messages"][-1] - assert "run limit (2/2)" in last_message.content + assert len(successful_tool_msgs) == 1, "Should have 1 successful tool execution (test1)" + assert len(error_tool_msgs) == 1, "Should have 1 artificial error ToolMessage (test2)" + assert len(ai_limit_msgs) == 1, "Should have AI limit message after test2 proposed" - # Second run: starts fresh, should also hit run limit after 2 calls - result = agent.invoke({"messages": [HumanMessage("Question 2")]}, thread_config) - last_message = 
result["messages"][-1] - # This should also hit the limit because run count resets - assert "run limit (2/2)" in last_message.content + # Second invocation: run limit should reset, allowing continued execution + result2 = agent.invoke( + {"messages": [HumanMessage("Question 2")]}, + {"configurable": {"thread_id": "test_thread"}}, + ) + + assert len(result2["messages"]) > len(result1["messages"]), ( + "Second invocation should add new messages, proving run limit reset" + ) def test_exception_error_messages(): - """Test that exceptions provide clear error messages.""" - # Test global tool limit - middleware = ToolCallLimitMiddleware(thread_limit=3, run_limit=2, exit_behavior="error") - - state = { - "messages": [ - HumanMessage("Q"), - AIMessage("R", tool_calls=[{"name": "search", "args": {}, "id": "1"}]), - ToolMessage("Result", tool_call_id="1"), - AIMessage("R", tool_calls=[{"name": "search", "args": {}, "id": "2"}]), - ToolMessage("Result", tool_call_id="2"), - AIMessage("R", tool_calls=[{"name": "search", "args": {}, "id": "3"}]), - ] - } - + """Test that error messages include expected information.""" + # Test for specific tool with pytest.raises(ToolCallLimitExceededError) as exc_info: - middleware.before_model(state, None) # type: ignore[arg-type] + raise ToolCallLimitExceededError( + thread_count=5, run_count=3, thread_limit=4, run_limit=2, tool_name="search" + ) + msg = str(exc_info.value) + assert "search" in msg.lower() + assert "5/4" in msg or "thread" in msg.lower() - error_msg = str(exc_info.value) - assert "Tool call limits exceeded" in error_msg - assert "thread limit (3/3)" in error_msg + # Test for all tools + with pytest.raises(ToolCallLimitExceededError) as exc_info: + raise ToolCallLimitExceededError( + thread_count=10, run_count=5, thread_limit=8, run_limit=None, tool_name=None + ) + msg = str(exc_info.value) + assert "10/8" in msg or "thread" in msg.lower() - # Test specific tool limit - middleware = ToolCallLimitMiddleware(tool_name="search", thread_limit=2, exit_behavior="error") +def test_limit_reached_but_not_exceeded(): + """Test that limits are only triggered when exceeded (>), not when reached (==).""" + middleware = ToolCallLimitMiddleware(thread_limit=3, run_limit=2, exit_behavior="end") + runtime = None + + # Test when limit is reached exactly (count = limit) - should not trigger + state = { + "messages": [AIMessage("Response", tool_calls=[{"name": "search", "args": {}, "id": "1"}])], + "thread_tool_call_count": {"__all__": 2}, # After +1 will be exactly 3 + "run_tool_call_count": {"__all__": 1}, + } + result = middleware.after_model(state, runtime) # type: ignore[arg-type] + assert result is not None + assert "jump_to" not in result + assert result["thread_tool_call_count"]["__all__"] == 3 + + # Test when limit is exceeded (count > limit) - should trigger + state = { + "messages": [AIMessage("Response", tool_calls=[{"name": "search", "args": {}, "id": "1"}])], + "thread_tool_call_count": {"__all__": 3}, # After +1 will be 4 > 3 + "run_tool_call_count": {"__all__": 1}, + } + result = middleware.after_model(state, runtime) # type: ignore[arg-type] + assert result is not None + assert "jump_to" in result + assert result["jump_to"] == "end" + + +def test_exit_behavior_continue(): + """Test that exit_behavior='continue' blocks only the exceeded tool, not others. + + Verifies that when a specific tool hits its limit, it gets blocked with error messages + while other tools continue to execute normally. 
+ """ + + @tool + def search(query: str) -> str: + """Search for information.""" + return f"Search: {query}" + + @tool + def calculator(expression: str) -> str: + """Calculate an expression.""" + return f"Calc: {expression}" + + model = FakeToolCallingModel( + tool_calls=[ + [ + ToolCall(name="search", args={"query": "q1"}, id="1"), + ToolCall(name="calculator", args={"expression": "1+1"}, id="2"), + ], + [ + ToolCall(name="search", args={"query": "q2"}, id="3"), + ToolCall(name="calculator", args={"expression": "2+2"}, id="4"), + ], + [ + ToolCall(name="search", args={"query": "q3"}, id="5"), # Should be blocked + ToolCall(name="calculator", args={"expression": "3+3"}, id="6"), # Should work + ], + [], + ] + ) + + # Limit search to 2 calls, but allow other tools to continue + search_limiter = ToolCallLimitMiddleware( + tool_name="search", thread_limit=2, exit_behavior="continue" + ) + + agent = create_agent( + model=model, + tools=[search, calculator], + middleware=[search_limiter], + checkpointer=InMemorySaver(), + ) + + result = agent.invoke( + {"messages": [HumanMessage("Question")]}, + {"configurable": {"thread_id": "test_thread"}}, + ) + + tool_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + + # Verify search has 2 successful + 1 blocked, calculator has all 3 successful + successful_search_msgs = [msg for msg in tool_messages if "Search:" in msg.content] + blocked_search_msgs = [ + msg + for msg in tool_messages + if "limit" in msg.content.lower() and "search" in msg.content.lower() + ] + successful_calc_msgs = [msg for msg in tool_messages if "Calc:" in msg.content] + + assert len(successful_search_msgs) == 2, "Should have 2 successful search calls" + assert len(blocked_search_msgs) == 1, "Should have 1 blocked search call with limit error" + assert len(successful_calc_msgs) == 3, "All calculator calls should succeed" + + +def test_thread_count_excludes_blocked_run_calls(): + """Test that thread count only includes allowed calls, not blocked run-scoped calls. + + When run_limit is lower than thread_limit and multiple parallel calls are made, + only the allowed calls should increment the thread count. + + Example: If run_limit=1 and 3 parallel calls are made, thread count should be 1 + (not 3) because the other 2 were blocked by the run limit. 
+ """ + # Set run_limit=1, thread_limit=10 (much higher) + middleware = ToolCallLimitMiddleware(thread_limit=10, run_limit=1, exit_behavior="continue") + runtime = None + + # Make 3 parallel tool calls - only 1 should be allowed by run_limit state = { "messages": [ - HumanMessage("Q"), AIMessage( - "R", + "Response", tool_calls=[ {"name": "search", "args": {}, "id": "1"}, - {"name": "calculator", "args": {}, "id": "2"}, + {"name": "search", "args": {}, "id": "2"}, + {"name": "search", "args": {}, "id": "3"}, ], - ), - ToolMessage("Result", tool_call_id="1"), - ToolMessage("Result", tool_call_id="2"), - AIMessage("R", tool_calls=[{"name": "search", "args": {}, "id": "3"}]), - ] + ) + ], + "thread_tool_call_count": {}, + "run_tool_call_count": {}, } + result = middleware.after_model(state, runtime) # type: ignore[arg-type] + assert result is not None - with pytest.raises(ToolCallLimitExceededError) as exc_info: - middleware.before_model(state, None) # type: ignore[arg-type] + # Thread count should be 1 (only the allowed call) + assert result["thread_tool_call_count"]["__all__"] == 1, ( + "Thread count should only include the 1 allowed call, not the 2 blocked calls" + ) + # Run count should be 3 (all attempted calls) + assert result["run_tool_call_count"]["__all__"] == 3, ( + "Run count should include all 3 attempted calls" + ) - error_msg = str(exc_info.value) - assert "'search' tool call limits exceeded" in error_msg - assert "thread limit (2/2)" in error_msg + # Verify 2 error messages were created for blocked calls + assert "messages" in result + error_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + assert len(error_messages) == 2, "Should have 2 error messages for the 2 blocked calls" + + +def test_unified_error_messages(): + """Test that error messages instruct model not to call again for both run and thread limits. + + Previously, only thread limit messages included 'Do not' instruction. + Now both run and thread limit messages should include it. + """ + middleware = ToolCallLimitMiddleware(thread_limit=10, run_limit=1, exit_behavior="continue") + runtime = None + + # Test with run limit exceeded (thread limit not exceeded) + state = { + "messages": [AIMessage("Response", tool_calls=[{"name": "search", "args": {}, "id": "2"}])], + "thread_tool_call_count": {"__all__": 1}, # Under thread limit + "run_tool_call_count": {"__all__": 1}, # At run limit, next call will exceed + } + result = middleware.after_model(state, runtime) # type: ignore[arg-type] + assert result is not None + + # Check the error message includes "Do not" instruction + error_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + assert len(error_messages) == 1 + error_content = error_messages[0].content + assert "Do not" in error_content, ( + "Run limit error message should include 'Do not' instruction to guide model behavior" + ) + + +def test_end_behavior_creates_artificial_messages(): + """Test that 'end' behavior creates an AI message explaining why execution stopped. + + Verifies that when limit is exceeded with exit_behavior='end', the middleware: + 1. Injects an artificial error ToolMessage for the blocked tool call + 2. Adds an AI message explaining the limit to the user + 3. 
Jumps to end, stopping execution + """ + + @tool + def search(query: str) -> str: + """Search for information.""" + return f"Results: {query}" + + model = FakeToolCallingModel( + tool_calls=[ + [ToolCall(name="search", args={"query": "q1"}, id="1")], + [ToolCall(name="search", args={"query": "q2"}, id="2")], + [ToolCall(name="search", args={"query": "q3"}, id="3")], # Exceeds limit + [], + ] + ) + + limiter = ToolCallLimitMiddleware(thread_limit=2, exit_behavior="end") + agent = create_agent( + model=model, tools=[search], middleware=[limiter], checkpointer=InMemorySaver() + ) + + result = agent.invoke( + {"messages": [HumanMessage("Test")]}, {"configurable": {"thread_id": "test"}} + ) + + # Verify AI message explaining the limit (displayed to user - includes thread/run details) + ai_limit_messages = [ + msg + for msg in result["messages"] + if isinstance(msg, AIMessage) and "limit" in msg.content.lower() and not msg.tool_calls + ] + assert len(ai_limit_messages) == 1, "Should have exactly one AI message explaining the limit" + + ai_msg_content = ai_limit_messages[0].content.lower() + assert "thread limit exceeded" in ai_msg_content or "run limit exceeded" in ai_msg_content, ( + "AI message should include thread/run limit details for the user" + ) + + # Verify tool message counts + tool_messages = [msg for msg in result["messages"] if isinstance(msg, ToolMessage)] + successful_tool_msgs = [msg for msg in tool_messages if msg.status != "error"] + error_tool_msgs = [msg for msg in tool_messages if msg.status == "error"] + + assert len(successful_tool_msgs) == 2, "Should have 2 successful tool messages (q1, q2)" + assert len(error_tool_msgs) == 1, "Should have 1 artificial error tool message (q3)" + + # Verify the error tool message (sent to model - no thread/run details, includes instruction) + error_msg_content = error_tool_msgs[0].content + assert "Tool call limit exceeded" in error_msg_content + assert "Do not" in error_msg_content, ( + "Tool message should instruct model not to call tool again" + ) diff --git a/libs/langchain_v1/tests/unit_tests/agents/test_tool_node.py b/libs/langchain_v1/tests/unit_tests/agents/test_tool_node.py deleted file mode 100644 index 65439b28888..00000000000 --- a/libs/langchain_v1/tests/unit_tests/agents/test_tool_node.py +++ /dev/null @@ -1,1478 +0,0 @@ -import contextlib -import dataclasses -import json -from functools import partial -from typing import ( - Annotated, - Any, - NoReturn, - TypeVar, - Union, -) - -import pytest -from langchain_core.messages import ( - AIMessage, - AnyMessage, - HumanMessage, - RemoveMessage, - ToolCall, - ToolMessage, -) -from langchain_core.tools import BaseTool, ToolException -from langchain_core.tools import tool as dec_tool -from langgraph.config import get_stream_writer -from langgraph.errors import GraphBubbleUp, GraphInterrupt -from langgraph.graph import START, MessagesState, StateGraph -from langgraph.graph.message import REMOVE_ALL_MESSAGES, add_messages -from langgraph.store.base import BaseStore -from langgraph.store.memory import InMemoryStore -from langgraph.types import Command, Send -from pydantic import BaseModel -from pydantic.v1 import BaseModel as BaseModelV1 -from typing_extensions import TypedDict - -from langchain.tools import ( - ToolNode, - InjectedState, - InjectedStore, -) -from langchain.tools.tool_node import TOOL_CALL_ERROR_TEMPLATE, ToolInvocationError, tools_condition - -from .messages import _AnyIdHumanMessage, _AnyIdToolMessage -from .model import FakeToolCallingModel - -pytestmark = 
pytest.mark.anyio - - -def tool1(some_val: int, some_other_val: str) -> str: - """Tool 1 docstring.""" - if some_val == 0: - msg = "Test error" - raise ValueError(msg) - return f"{some_val} - {some_other_val}" - - -async def tool2(some_val: int, some_other_val: str) -> str: - """Tool 2 docstring.""" - if some_val == 0: - msg = "Test error" - raise ToolException(msg) - return f"tool2: {some_val} - {some_other_val}" - - -async def tool3(some_val: int, some_other_val: str) -> str: - """Tool 3 docstring.""" - return [ - {"key_1": some_val, "key_2": "foo"}, - {"key_1": some_other_val, "key_2": "baz"}, - ] - - -async def tool4(some_val: int, some_other_val: str) -> str: - """Tool 4 docstring.""" - return [ - {"type": "image_url", "image_url": {"url": "abdc"}}, - ] - - -@dec_tool -def tool5(some_val: int) -> NoReturn: - """Tool 5 docstring.""" - msg = "Test error" - raise ToolException(msg) - - -tool5.handle_tool_error = "foo" - - -async def test_tool_node() -> None: - """Test tool node.""" - result = ToolNode([tool1]).invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 1, "some_other_val": "foo"}, - "id": "some 0", - } - ], - ) - ] - } - ) - - tool_message: ToolMessage = result["messages"][-1] - assert tool_message.type == "tool" - assert tool_message.content == "1 - foo" - assert tool_message.tool_call_id == "some 0" - - result2 = await ToolNode([tool2]).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool2", - "args": {"some_val": 2, "some_other_val": "bar"}, - "id": "some 1", - } - ], - ) - ] - } - ) - - tool_message: ToolMessage = result2["messages"][-1] - assert tool_message.type == "tool" - assert tool_message.content == "tool2: 2 - bar" - - # list of dicts tool content - result3 = await ToolNode([tool3]).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool3", - "args": {"some_val": 2, "some_other_val": "bar"}, - "id": "some 2", - } - ], - ) - ] - } - ) - tool_message: ToolMessage = result3["messages"][-1] - assert tool_message.type == "tool" - assert ( - tool_message.content == '[{"key_1": 2, "key_2": "foo"}, {"key_1": "bar", "key_2": "baz"}]' - ) - assert tool_message.tool_call_id == "some 2" - - # list of content blocks tool content - result4 = await ToolNode([tool4]).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool4", - "args": {"some_val": 2, "some_other_val": "bar"}, - "id": "some 3", - } - ], - ) - ] - } - ) - tool_message: ToolMessage = result4["messages"][-1] - assert tool_message.type == "tool" - assert tool_message.content == [{"type": "image_url", "image_url": {"url": "abdc"}}] - assert tool_message.tool_call_id == "some 3" - - -async def test_tool_node_tool_call_input() -> None: - # Single tool call - tool_call_1 = { - "name": "tool1", - "args": {"some_val": 1, "some_other_val": "foo"}, - "id": "some 0", - "type": "tool_call", - } - result = ToolNode([tool1]).invoke([tool_call_1]) - assert result["messages"] == [ - ToolMessage(content="1 - foo", tool_call_id="some 0", name="tool1"), - ] - - # Multiple tool calls - tool_call_2 = { - "name": "tool1", - "args": {"some_val": 2, "some_other_val": "bar"}, - "id": "some 1", - "type": "tool_call", - } - result = ToolNode([tool1]).invoke([tool_call_1, tool_call_2]) - assert result["messages"] == [ - ToolMessage(content="1 - foo", tool_call_id="some 0", name="tool1"), - ToolMessage(content="2 - bar", tool_call_id="some 1", name="tool1"), - ] - - # Test with unknown tool - 
tool_call_3 = tool_call_1.copy() - tool_call_3["name"] = "tool2" - result = ToolNode([tool1]).invoke([tool_call_1, tool_call_3]) - assert result["messages"] == [ - ToolMessage(content="1 - foo", tool_call_id="some 0", name="tool1"), - ToolMessage( - content="Error: tool2 is not a valid tool, try one of [tool1].", - name="tool2", - tool_call_id="some 0", - status="error", - ), - ] - - -def test_tool_node_error_handling_default_invocation() -> None: - tn = ToolNode([tool1]) - result = tn.invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"invalid": 0, "args": "foo"}, - "id": "some id", - }, - ], - ) - ] - } - ) - - assert all(m.type == "tool" for m in result["messages"]) - assert all(m.status == "error" for m in result["messages"]) - assert ( - "Error invoking tool 'tool1' with kwargs {'invalid': 0, 'args': 'foo'} with error:\n" - in result["messages"][0].content - ) - - -def test_tool_node_error_handling_default_exception() -> None: - tn = ToolNode([tool1]) - with pytest.raises(ValueError): - tn.invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 0, "some_other_val": "foo"}, - "id": "some id", - }, - ], - ) - ] - } - ) - - -async def test_tool_node_error_handling() -> None: - def handle_all(e: ValueError | ToolException | ToolInvocationError): - return TOOL_CALL_ERROR_TEMPLATE.format(error=repr(e)) - - # test catching all exceptions, via: - # - handle_tool_errors = True - # - passing a tuple of all exceptions - # - passing a callable with all exceptions in the signature - for handle_tool_errors in ( - True, - (ValueError, ToolException, ToolInvocationError), - handle_all, - ): - result_error = await ToolNode( - [tool1, tool2, tool3], handle_tool_errors=handle_tool_errors - ).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 0, "some_other_val": "foo"}, - "id": "some id", - }, - { - "name": "tool2", - "args": {"some_val": 0, "some_other_val": "bar"}, - "id": "some other id", - }, - { - "name": "tool3", - "args": {"some_val": 0}, - "id": "another id", - }, - ], - ) - ] - } - ) - - assert all(m.type == "tool" for m in result_error["messages"]) - assert all(m.status == "error" for m in result_error["messages"]) - assert ( - result_error["messages"][0].content - == f"Error: {ValueError('Test error')!r}\n Please fix your mistakes." - ) - assert ( - result_error["messages"][1].content - == f"Error: {ToolException('Test error')!r}\n Please fix your mistakes." 
- ) - assert ( - "ValidationError" in result_error["messages"][2].content - or "validation error" in result_error["messages"][2].content - ) - - assert result_error["messages"][0].tool_call_id == "some id" - assert result_error["messages"][1].tool_call_id == "some other id" - assert result_error["messages"][2].tool_call_id == "another id" - - -async def test_tool_node_error_handling_callable() -> None: - def handle_value_error(e: ValueError) -> str: - return "Value error" - - def handle_tool_exception(e: ToolException) -> str: - return "Tool exception" - - for handle_tool_errors in ("Value error", handle_value_error): - result_error = await ToolNode([tool1], handle_tool_errors=handle_tool_errors).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 0, "some_other_val": "foo"}, - "id": "some id", - }, - ], - ) - ] - } - ) - tool_message: ToolMessage = result_error["messages"][-1] - assert tool_message.type == "tool" - assert tool_message.status == "error" - assert tool_message.content == "Value error" - - # test raising for an unhandled exception, via: - # - passing a tuple of all exceptions - # - passing a callable with all exceptions in the signature - for handle_tool_errors in ((ValueError,), handle_value_error): - with pytest.raises(ToolException) as exc_info: - await ToolNode([tool1, tool2], handle_tool_errors=handle_tool_errors).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 0, "some_other_val": "foo"}, - "id": "some id", - }, - { - "name": "tool2", - "args": {"some_val": 0, "some_other_val": "bar"}, - "id": "some other id", - }, - ], - ) - ] - } - ) - assert str(exc_info.value) == "Test error" - - for handle_tool_errors in ((ToolException,), handle_tool_exception): - with pytest.raises(ValueError) as exc_info: - await ToolNode([tool1, tool2], handle_tool_errors=handle_tool_errors).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 0, "some_other_val": "foo"}, - "id": "some id", - }, - { - "name": "tool2", - "args": {"some_val": 0, "some_other_val": "bar"}, - "id": "some other id", - }, - ], - ) - ] - } - ) - assert str(exc_info.value) == "Test error" - - -async def test_tool_node_handle_tool_errors_false() -> None: - with pytest.raises(ValueError) as exc_info: - ToolNode([tool1], handle_tool_errors=False).invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 0, "some_other_val": "foo"}, - "id": "some id", - } - ], - ) - ] - } - ) - - assert str(exc_info.value) == "Test error" - - with pytest.raises(ToolException): - await ToolNode([tool2], handle_tool_errors=False).ainvoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool2", - "args": {"some_val": 0, "some_other_val": "bar"}, - "id": "some id", - } - ], - ) - ] - } - ) - - assert str(exc_info.value) == "Test error" - - # test validation errors get raised if handle_tool_errors is False - with pytest.raises(ToolInvocationError): - ToolNode([tool1], handle_tool_errors=False).invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool1", - "args": {"some_val": 0}, - "id": "some id", - } - ], - ) - ] - } - ) - - -def test_tool_node_individual_tool_error_handling() -> None: - # test error handling on individual tools (and that it overrides overall error handling!) 
- result_individual_tool_error_handler = ToolNode([tool5], handle_tool_errors="bar").invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool5", - "args": {"some_val": 0}, - "id": "some 0", - } - ], - ) - ] - } - ) - - tool_message: ToolMessage = result_individual_tool_error_handler["messages"][-1] - assert tool_message.type == "tool" - assert tool_message.status == "error" - assert tool_message.content == "foo" - assert tool_message.tool_call_id == "some 0" - - -def test_tool_node_incorrect_tool_name() -> None: - result_incorrect_name = ToolNode([tool1, tool2]).invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool3", - "args": {"some_val": 1, "some_other_val": "foo"}, - "id": "some 0", - } - ], - ) - ] - } - ) - - tool_message: ToolMessage = result_incorrect_name["messages"][-1] - assert tool_message.type == "tool" - assert tool_message.status == "error" - assert tool_message.content == "Error: tool3 is not a valid tool, try one of [tool1, tool2]." - assert tool_message.tool_call_id == "some 0" - - -def test_tool_node_node_interrupt() -> None: - def tool_interrupt(some_val: int) -> None: - """Tool docstring.""" - msg = "foo" - raise GraphBubbleUp(msg) - - def handle(e: GraphInterrupt) -> str: - return "handled" - - for handle_tool_errors in (True, (GraphBubbleUp,), "handled", handle, False): - node = ToolNode([tool_interrupt], handle_tool_errors=handle_tool_errors) - with pytest.raises(GraphBubbleUp) as exc_info: - node.invoke( - { - "messages": [ - AIMessage( - "hi?", - tool_calls=[ - { - "name": "tool_interrupt", - "args": {"some_val": 0}, - "id": "some 0", - } - ], - ) - ] - } - ) - assert exc_info.value == "foo" - - -@pytest.mark.parametrize("input_type", ["dict", "tool_calls"]) -async def test_tool_node_command(input_type: str) -> None: - from langchain_core.tools.base import InjectedToolCallId - - @dec_tool - def transfer_to_bob(tool_call_id: Annotated[str, InjectedToolCallId]): - """Transfer to Bob""" - return Command( - update={ - "messages": [ToolMessage(content="Transferred to Bob", tool_call_id=tool_call_id)] - }, - goto="bob", - graph=Command.PARENT, - ) - - @dec_tool - async def async_transfer_to_bob(tool_call_id: Annotated[str, InjectedToolCallId]): - """Transfer to Bob""" - return Command( - update={ - "messages": [ToolMessage(content="Transferred to Bob", tool_call_id=tool_call_id)] - }, - goto="bob", - graph=Command.PARENT, - ) - - class CustomToolSchema(BaseModel): - tool_call_id: Annotated[str, InjectedToolCallId] - - class MyCustomTool(BaseTool): - def _run(*args: Any, **kwargs: Any): - return Command( - update={ - "messages": [ - ToolMessage( - content="Transferred to Bob", - tool_call_id=kwargs["tool_call_id"], - ) - ] - }, - goto="bob", - graph=Command.PARENT, - ) - - async def _arun(*args: Any, **kwargs: Any): - return Command( - update={ - "messages": [ - ToolMessage( - content="Transferred to Bob", - tool_call_id=kwargs["tool_call_id"], - ) - ] - }, - goto="bob", - graph=Command.PARENT, - ) - - custom_tool = MyCustomTool( - name="custom_transfer_to_bob", - description="Transfer to bob", - args_schema=CustomToolSchema, - ) - async_custom_tool = MyCustomTool( - name="async_custom_transfer_to_bob", - description="Transfer to bob", - args_schema=CustomToolSchema, - ) - - # test mixing regular tools and tools returning commands - def add(a: int, b: int) -> int: - """Add two numbers""" - return a + b - - tool_calls = [ - {"args": {"a": 1, "b": 2}, "id": "1", "name": "add", "type": "tool_call"}, - {"args": {}, 
"id": "2", "name": "transfer_to_bob", "type": "tool_call"}, - ] - if input_type == "dict": - input_ = {"messages": [AIMessage("", tool_calls=tool_calls)]} - elif input_type == "tool_calls": - input_ = tool_calls - result = ToolNode([add, transfer_to_bob]).invoke(input_) - - assert result == [ - { - "messages": [ - ToolMessage( - content="3", - tool_call_id="1", - name="add", - ) - ] - }, - Command( - update={ - "messages": [ - ToolMessage( - content="Transferred to Bob", - tool_call_id="2", - name="transfer_to_bob", - ) - ] - }, - goto="bob", - graph=Command.PARENT, - ), - ] - - # test tools returning commands - - # test sync tools - for tool in [transfer_to_bob, custom_tool]: - result = ToolNode([tool]).invoke( - {"messages": [AIMessage("", tool_calls=[{"args": {}, "id": "1", "name": tool.name}])]} - ) - assert result == [ - Command( - update={ - "messages": [ - ToolMessage( - content="Transferred to Bob", - tool_call_id="1", - name=tool.name, - ) - ] - }, - goto="bob", - graph=Command.PARENT, - ) - ] - - # test async tools - for tool in [async_transfer_to_bob, async_custom_tool]: - result = await ToolNode([tool]).ainvoke( - {"messages": [AIMessage("", tool_calls=[{"args": {}, "id": "1", "name": tool.name}])]} - ) - assert result == [ - Command( - update={ - "messages": [ - ToolMessage( - content="Transferred to Bob", - tool_call_id="1", - name=tool.name, - ) - ] - }, - goto="bob", - graph=Command.PARENT, - ) - ] - - # test multiple commands - result = ToolNode([transfer_to_bob, custom_tool]).invoke( - { - "messages": [ - AIMessage( - "", - tool_calls=[ - {"args": {}, "id": "1", "name": "transfer_to_bob"}, - {"args": {}, "id": "2", "name": "custom_transfer_to_bob"}, - ], - ) - ] - } - ) - assert result == [ - Command( - update={ - "messages": [ - ToolMessage( - content="Transferred to Bob", - tool_call_id="1", - name="transfer_to_bob", - ) - ] - }, - goto="bob", - graph=Command.PARENT, - ), - Command( - update={ - "messages": [ - ToolMessage( - content="Transferred to Bob", - tool_call_id="2", - name="custom_transfer_to_bob", - ) - ] - }, - goto="bob", - graph=Command.PARENT, - ), - ] - - # test validation (mismatch between input type and command.update type) - with pytest.raises(ValueError): - - @dec_tool - def list_update_tool(tool_call_id: Annotated[str, InjectedToolCallId]): - """My tool""" - return Command(update=[ToolMessage(content="foo", tool_call_id=tool_call_id)]) - - ToolNode([list_update_tool]).invoke( - { - "messages": [ - AIMessage( - "", - tool_calls=[{"args": {}, "id": "1", "name": "list_update_tool"}], - ) - ] - } - ) - - # test validation (missing tool message in the update for current graph) - with pytest.raises(ValueError): - - @dec_tool - def no_update_tool(): - """My tool""" - return Command(update={"messages": []}) - - ToolNode([no_update_tool]).invoke( - { - "messages": [ - AIMessage( - "", - tool_calls=[{"args": {}, "id": "1", "name": "no_update_tool"}], - ) - ] - } - ) - - # test validation (tool message with a wrong tool call ID) - with pytest.raises(ValueError): - - @dec_tool - def mismatching_tool_call_id_tool(): - """My tool""" - return Command(update={"messages": [ToolMessage(content="foo", tool_call_id="2")]}) - - ToolNode([mismatching_tool_call_id_tool]).invoke( - { - "messages": [ - AIMessage( - "", - tool_calls=[ - { - "args": {}, - "id": "1", - "name": "mismatching_tool_call_id_tool", - } - ], - ) - ] - } - ) - - # test validation (missing tool message in the update for parent graph is OK) - @dec_tool - def node_update_parent_tool(): - """No update""" - 
return Command(update={"messages": []}, graph=Command.PARENT) - - assert ToolNode([node_update_parent_tool]).invoke( - { - "messages": [ - AIMessage( - "", - tool_calls=[{"args": {}, "id": "1", "name": "node_update_parent_tool"}], - ) - ] - } - ) == [Command(update={"messages": []}, graph=Command.PARENT)] - - -async def test_tool_node_command_list_input() -> None: - from langchain_core.tools.base import InjectedToolCallId - - @dec_tool - def transfer_to_bob(tool_call_id: Annotated[str, InjectedToolCallId]): - """Transfer to Bob""" - return Command( - update=[ToolMessage(content="Transferred to Bob", tool_call_id=tool_call_id)], - goto="bob", - graph=Command.PARENT, - ) - - @dec_tool - async def async_transfer_to_bob(tool_call_id: Annotated[str, InjectedToolCallId]): - """Transfer to Bob""" - return Command( - update=[ToolMessage(content="Transferred to Bob", tool_call_id=tool_call_id)], - goto="bob", - graph=Command.PARENT, - ) - - class CustomToolSchema(BaseModel): - tool_call_id: Annotated[str, InjectedToolCallId] - - class MyCustomTool(BaseTool): - def _run(*args: Any, **kwargs: Any): - return Command( - update=[ - ToolMessage( - content="Transferred to Bob", - tool_call_id=kwargs["tool_call_id"], - ) - ], - goto="bob", - graph=Command.PARENT, - ) - - async def _arun(*args: Any, **kwargs: Any): - return Command( - update=[ - ToolMessage( - content="Transferred to Bob", - tool_call_id=kwargs["tool_call_id"], - ) - ], - goto="bob", - graph=Command.PARENT, - ) - - custom_tool = MyCustomTool( - name="custom_transfer_to_bob", - description="Transfer to bob", - args_schema=CustomToolSchema, - ) - async_custom_tool = MyCustomTool( - name="async_custom_transfer_to_bob", - description="Transfer to bob", - args_schema=CustomToolSchema, - ) - - # test mixing regular tools and tools returning commands - def add(a: int, b: int) -> int: - """Add two numbers""" - return a + b - - result = ToolNode([add, transfer_to_bob]).invoke( - [ - AIMessage( - "", - tool_calls=[ - {"args": {"a": 1, "b": 2}, "id": "1", "name": "add"}, - {"args": {}, "id": "2", "name": "transfer_to_bob"}, - ], - ) - ] - ) - - assert result == [ - [ - ToolMessage( - content="3", - tool_call_id="1", - name="add", - ) - ], - Command( - update=[ - ToolMessage( - content="Transferred to Bob", - tool_call_id="2", - name="transfer_to_bob", - ) - ], - goto="bob", - graph=Command.PARENT, - ), - ] - - # test tools returning commands - - # test sync tools - for tool in [transfer_to_bob, custom_tool]: - result = ToolNode([tool]).invoke( - [AIMessage("", tool_calls=[{"args": {}, "id": "1", "name": tool.name}])] - ) - assert result == [ - Command( - update=[ - ToolMessage( - content="Transferred to Bob", - tool_call_id="1", - name=tool.name, - ) - ], - goto="bob", - graph=Command.PARENT, - ) - ] - - # test async tools - for tool in [async_transfer_to_bob, async_custom_tool]: - result = await ToolNode([tool]).ainvoke( - [AIMessage("", tool_calls=[{"args": {}, "id": "1", "name": tool.name}])] - ) - assert result == [ - Command( - update=[ - ToolMessage( - content="Transferred to Bob", - tool_call_id="1", - name=tool.name, - ) - ], - goto="bob", - graph=Command.PARENT, - ) - ] - - # test multiple commands - result = ToolNode([transfer_to_bob, custom_tool]).invoke( - [ - AIMessage( - "", - tool_calls=[ - {"args": {}, "id": "1", "name": "transfer_to_bob"}, - {"args": {}, "id": "2", "name": "custom_transfer_to_bob"}, - ], - ) - ] - ) - assert result == [ - Command( - update=[ - ToolMessage( - content="Transferred to Bob", - tool_call_id="1", - 
name="transfer_to_bob", - ) - ], - goto="bob", - graph=Command.PARENT, - ), - Command( - update=[ - ToolMessage( - content="Transferred to Bob", - tool_call_id="2", - name="custom_transfer_to_bob", - ) - ], - goto="bob", - graph=Command.PARENT, - ), - ] - - # test validation (mismatch between input type and command.update type) - with pytest.raises(ValueError): - - @dec_tool - def list_update_tool(tool_call_id: Annotated[str, InjectedToolCallId]): - """My tool""" - return Command( - update={"messages": [ToolMessage(content="foo", tool_call_id=tool_call_id)]} - ) - - ToolNode([list_update_tool]).invoke( - [ - AIMessage( - "", - tool_calls=[{"args": {}, "id": "1", "name": "list_update_tool"}], - ) - ] - ) - - # test validation (missing tool message in the update for current graph) - with pytest.raises(ValueError): - - @dec_tool - def no_update_tool(): - """My tool""" - return Command(update=[]) - - ToolNode([no_update_tool]).invoke( - [ - AIMessage( - "", - tool_calls=[{"args": {}, "id": "1", "name": "no_update_tool"}], - ) - ] - ) - - # test validation (tool message with a wrong tool call ID) - with pytest.raises(ValueError): - - @dec_tool - def mismatching_tool_call_id_tool(): - """My tool""" - return Command(update=[ToolMessage(content="foo", tool_call_id="2")]) - - ToolNode([mismatching_tool_call_id_tool]).invoke( - [ - AIMessage( - "", - tool_calls=[{"args": {}, "id": "1", "name": "mismatching_tool_call_id_tool"}], - ) - ] - ) - - # test validation (missing tool message in the update for parent graph is OK) - @dec_tool - def node_update_parent_tool(): - """No update""" - return Command(update=[], graph=Command.PARENT) - - assert ToolNode([node_update_parent_tool]).invoke( - [ - AIMessage( - "", - tool_calls=[{"args": {}, "id": "1", "name": "node_update_parent_tool"}], - ) - ] - ) == [Command(update=[], graph=Command.PARENT)] - - -def test_tool_node_parent_command_with_send() -> None: - from langchain_core.tools.base import InjectedToolCallId - - @dec_tool - def transfer_to_alice(tool_call_id: Annotated[str, InjectedToolCallId]): - """Transfer to Alice""" - return Command( - goto=[ - Send( - "alice", - { - "messages": [ - ToolMessage( - content="Transferred to Alice", - name="transfer_to_alice", - tool_call_id=tool_call_id, - ) - ] - }, - ) - ], - graph=Command.PARENT, - ) - - @dec_tool - def transfer_to_bob(tool_call_id: Annotated[str, InjectedToolCallId]): - """Transfer to Bob""" - return Command( - goto=[ - Send( - "bob", - { - "messages": [ - ToolMessage( - content="Transferred to Bob", - name="transfer_to_bob", - tool_call_id=tool_call_id, - ) - ] - }, - ) - ], - graph=Command.PARENT, - ) - - tool_calls = [ - {"args": {}, "id": "1", "name": "transfer_to_alice", "type": "tool_call"}, - {"args": {}, "id": "2", "name": "transfer_to_bob", "type": "tool_call"}, - ] - - result = ToolNode([transfer_to_alice, transfer_to_bob]).invoke( - [AIMessage("", tool_calls=tool_calls)] - ) - - assert result == [ - Command( - goto=[ - Send( - "alice", - { - "messages": [ - ToolMessage( - content="Transferred to Alice", - name="transfer_to_alice", - tool_call_id="1", - ) - ] - }, - ), - Send( - "bob", - { - "messages": [ - ToolMessage( - content="Transferred to Bob", - name="transfer_to_bob", - tool_call_id="2", - ) - ] - }, - ), - ], - graph=Command.PARENT, - ) - ] - - -async def test_tool_node_command_remove_all_messages() -> None: - from langchain_core.tools.base import InjectedToolCallId - - @dec_tool - def remove_all_messages_tool(tool_call_id: Annotated[str, InjectedToolCallId]): - """A tool that 
removes all messages.""" - return Command(update={"messages": [RemoveMessage(id=REMOVE_ALL_MESSAGES)]}) - - tool_node = ToolNode([remove_all_messages_tool]) - tool_call = { - "name": "remove_all_messages_tool", - "args": {}, - "id": "tool_call_123", - } - result = await tool_node.ainvoke({"messages": [AIMessage(content="", tool_calls=[tool_call])]}) - - assert isinstance(result, list) - assert len(result) == 1 - command = result[0] - assert isinstance(command, Command) - assert command.update == {"messages": [RemoveMessage(id=REMOVE_ALL_MESSAGES)]} - - -class _InjectStateSchema(TypedDict): - messages: list - foo: str - - -class _InjectedStatePydanticSchema(BaseModelV1): - messages: list - foo: str - - -class _InjectedStatePydanticV2Schema(BaseModel): - messages: list - foo: str - - -@dataclasses.dataclass -class _InjectedStateDataclassSchema: - messages: list - foo: str - - -T = TypeVar("T") - - -@pytest.mark.parametrize( - "schema_", - [ - _InjectStateSchema, - _InjectedStatePydanticSchema, - _InjectedStatePydanticV2Schema, - _InjectedStateDataclassSchema, - ], -) -def test_tool_node_inject_state(schema_: type[T]) -> None: - def tool1(some_val: int, state: Annotated[T, InjectedState]) -> str: - """Tool 1 docstring.""" - if isinstance(state, dict): - return state["foo"] - return state.foo - - def tool2(some_val: int, state: Annotated[T, InjectedState()]) -> str: - """Tool 2 docstring.""" - if isinstance(state, dict): - return state["foo"] - return state.foo - - def tool3( - some_val: int, - foo: Annotated[str, InjectedState("foo")], - msgs: Annotated[list[AnyMessage], InjectedState("messages")], - ) -> str: - """Tool 1 docstring.""" - return foo - - def tool4(some_val: int, msgs: Annotated[list[AnyMessage], InjectedState("messages")]) -> str: - """Tool 1 docstring.""" - return msgs[0].content - - node = ToolNode([tool1, tool2, tool3, tool4], handle_tool_errors=True) - for tool_name in ("tool1", "tool2", "tool3"): - tool_call = { - "name": tool_name, - "args": {"some_val": 1}, - "id": "some 0", - "type": "tool_call", - } - msg = AIMessage("hi?", tool_calls=[tool_call]) - result = node.invoke(schema_(messages=[msg], foo="bar")) - tool_message = result["messages"][-1] - assert tool_message.content == "bar", f"Failed for tool={tool_name}" - - if tool_name == "tool3": - failure_input = None - with contextlib.suppress(Exception): - failure_input = schema_(messages=[msg], notfoo="bar") - if failure_input is not None: - with pytest.raises(KeyError): - node.invoke(failure_input) - - with pytest.raises(ValueError): - node.invoke([msg]) - else: - failure_input = None - try: - failure_input = schema_(messages=[msg], notfoo="bar") - except Exception: - # We'd get a validation error from pydantic state and wouldn't make it to the node - # anyway - pass - if failure_input is not None: - messages_ = node.invoke(failure_input) - tool_message = messages_["messages"][-1] - assert "KeyError" in tool_message.content - tool_message = node.invoke([msg])[-1] - assert "KeyError" in tool_message.content - - tool_call = { - "name": "tool4", - "args": {"some_val": 1}, - "id": "some 0", - "type": "tool_call", - } - msg = AIMessage("hi?", tool_calls=[tool_call]) - result = node.invoke(schema_(messages=[msg], foo="")) - tool_message = result["messages"][-1] - assert tool_message.content == "hi?" - - result = node.invoke([msg]) - tool_message = result[-1] - assert tool_message.content == "hi?" 
- - -def test_tool_node_inject_store() -> None: - store = InMemoryStore() - namespace = ("test",) - - def tool1(some_val: int, store: Annotated[BaseStore, InjectedStore()]) -> str: - """Tool 1 docstring.""" - store_val = store.get(namespace, "test_key").value["foo"] - return f"Some val: {some_val}, store val: {store_val}" - - def tool2(some_val: int, store: Annotated[BaseStore, InjectedStore()]) -> str: - """Tool 2 docstring.""" - store_val = store.get(namespace, "test_key").value["foo"] - return f"Some val: {some_val}, store val: {store_val}" - - def tool3( - some_val: int, - bar: Annotated[str, InjectedState("bar")], - store: Annotated[BaseStore, InjectedStore()], - ) -> str: - """Tool 3 docstring.""" - store_val = store.get(namespace, "test_key").value["foo"] - return f"Some val: {some_val}, store val: {store_val}, state val: {bar}" - - node = ToolNode([tool1, tool2, tool3], handle_tool_errors=True) - store.put(namespace, "test_key", {"foo": "bar"}) - - class State(MessagesState): - bar: str - - builder = StateGraph(State) - builder.add_node("tools", node) - builder.add_edge(START, "tools") - graph = builder.compile(store=store) - - for tool_name in ("tool1", "tool2"): - tool_call = { - "name": tool_name, - "args": {"some_val": 1}, - "id": "some 0", - "type": "tool_call", - } - msg = AIMessage("hi?", tool_calls=[tool_call]) - node_result = node.invoke({"messages": [msg]}, store=store) - graph_result = graph.invoke({"messages": [msg]}) - for result in (node_result, graph_result): - result["messages"][-1] - tool_message = result["messages"][-1] - assert tool_message.content == "Some val: 1, store val: bar", ( - f"Failed for tool={tool_name}" - ) - - tool_call = { - "name": "tool3", - "args": {"some_val": 1}, - "id": "some 0", - "type": "tool_call", - } - msg = AIMessage("hi?", tool_calls=[tool_call]) - node_result = node.invoke({"messages": [msg], "bar": "baz"}, store=store) - graph_result = graph.invoke({"messages": [msg], "bar": "baz"}) - for result in (node_result, graph_result): - result["messages"][-1] - tool_message = result["messages"][-1] - assert tool_message.content == "Some val: 1, store val: bar, state val: baz", ( - f"Failed for tool={tool_name}" - ) - - # test injected store without passing store to compiled graph - failing_graph = builder.compile() - with pytest.raises(ValueError): - failing_graph.invoke({"messages": [msg], "bar": "baz"}) - - -def test_tool_node_ensure_utf8() -> None: - @dec_tool - def get_day_list(days: list[str]) -> list[str]: - """choose days""" - return days - - data = ["ζ˜ŸζœŸδΈ€", "ζ°΄ζ›œζ—₯", "λͺ©μš”일", "Friday"] - tools = [get_day_list] - tool_calls = [ToolCall(name=get_day_list.name, args={"days": data}, id="test_id")] - outputs: list[ToolMessage] = ToolNode(tools).invoke( - [AIMessage(content="", tool_calls=tool_calls)] - ) - assert outputs[0].content == json.dumps(data, ensure_ascii=False) - - -def test_tool_node_messages_key() -> None: - @dec_tool - def add(a: int, b: int) -> int: - """Adds a and b.""" - return a + b - - model = FakeToolCallingModel( - tool_calls=[[ToolCall(name=add.name, args={"a": 1, "b": 2}, id="test_id")]] - ) - - class State(TypedDict): - subgraph_messages: Annotated[list[AnyMessage], add_messages] - - def call_model(state: State) -> dict[str, Any]: - response = model.invoke(state["subgraph_messages"]) - model.tool_calls = [] - return {"subgraph_messages": response} - - builder = StateGraph(State) - builder.add_node("agent", call_model) - builder.add_node("tools", ToolNode([add], messages_key="subgraph_messages")) - 
builder.add_conditional_edges( - "agent", partial(tools_condition, messages_key="subgraph_messages") - ) - builder.add_edge(START, "agent") - builder.add_edge("tools", "agent") - - graph = builder.compile() - result = graph.invoke({"subgraph_messages": [HumanMessage(content="hi")]}) - assert result["subgraph_messages"] == [ - _AnyIdHumanMessage(content="hi"), - AIMessage( - content="hi", - id="0", - tool_calls=[ToolCall(name=add.name, args={"a": 1, "b": 2}, id="test_id")], - ), - _AnyIdToolMessage(content="3", name=add.name, tool_call_id="test_id"), - AIMessage(content="hi-hi-3", id="1"), - ] - - -def test_tool_node_stream_writer() -> None: - @dec_tool - def streaming_tool(x: int) -> str: - """Do something with writer.""" - my_writer = get_stream_writer() - for value in ["foo", "bar", "baz"]: - my_writer({"custom_tool_value": value}) - - return x - - tool_node = ToolNode([streaming_tool]) - graph = ( - StateGraph(MessagesState).add_node("tools", tool_node).add_edge(START, "tools").compile() - ) - - tool_call = { - "name": "streaming_tool", - "args": {"x": 1}, - "id": "1", - "type": "tool_call", - } - inputs = { - "messages": [AIMessage("", tool_calls=[tool_call])], - } - - assert list(graph.stream(inputs, stream_mode="custom")) == [ - {"custom_tool_value": "foo"}, - {"custom_tool_value": "bar"}, - {"custom_tool_value": "baz"}, - ] - assert list(graph.stream(inputs, stream_mode=["custom", "updates"])) == [ - ("custom", {"custom_tool_value": "foo"}), - ("custom", {"custom_tool_value": "bar"}), - ("custom", {"custom_tool_value": "baz"}), - ( - "updates", - { - "tools": { - "messages": [ - _AnyIdToolMessage( - content="1", - name="streaming_tool", - tool_call_id="1", - ), - ], - }, - }, - ), - ] diff --git a/libs/langchain_v1/tests/unit_tests/chat_models/test_chat_models.py b/libs/langchain_v1/tests/unit_tests/chat_models/test_chat_models.py index 75b0a86b121..9c1f6be5558 100644 --- a/libs/langchain_v1/tests/unit_tests/chat_models/test_chat_models.py +++ b/libs/langchain_v1/tests/unit_tests/chat_models/test_chat_models.py @@ -33,7 +33,7 @@ def test_all_imports() -> None: ("model_name", "model_provider"), [ ("gpt-4o", "openai"), - ("claude-3-opus-20240229", "anthropic"), + ("claude-opus-4-1", "anthropic"), ("accounts/fireworks/models/mixtral-8x7b-instruct", "fireworks"), ("mixtral-8x7b-32768", "groq"), ], @@ -241,17 +241,17 @@ def test_configurable_with_default() -> None: model_with_config = model_with_tools.with_config( RunnableConfig(tags=["foo"]), - configurable={"bar_model": "claude-3-7-sonnet-20250219"}, + configurable={"bar_model": "claude-sonnet-4-5-20250929"}, ) - assert model_with_config.model == "claude-3-7-sonnet-20250219" # type: ignore[attr-defined] + assert model_with_config.model == "claude-sonnet-4-5-20250929" # type: ignore[attr-defined] assert model_with_config.model_dump() == { # type: ignore[attr-defined] "name": None, "bound": { "name": None, "disable_streaming": False, - "model": "claude-3-7-sonnet-20250219", + "model": "claude-sonnet-4-5-20250929", "mcp_servers": None, "max_tokens": 64000, "temperature": None, diff --git a/libs/langchain_v1/tests/unit_tests/embeddings/test_caching.py b/libs/langchain_v1/tests/unit_tests/embeddings/test_caching.py deleted file mode 100644 index 913da9f90a8..00000000000 --- a/libs/langchain_v1/tests/unit_tests/embeddings/test_caching.py +++ /dev/null @@ -1,245 +0,0 @@ -"""Embeddings tests.""" - -import contextlib -import hashlib -import importlib -import warnings - -import pytest -from langchain_core.embeddings import Embeddings - -from 
langchain.embeddings import CacheBackedEmbeddings -from langchain.storage.in_memory import InMemoryStore - - -class MockEmbeddings(Embeddings): - def embed_documents(self, texts: list[str]) -> list[list[float]]: - # Simulate embedding documents - embeddings: list[list[float]] = [] - for text in texts: - if text == "RAISE_EXCEPTION": - msg = "Simulated embedding failure" - raise ValueError(msg) - embeddings.append([len(text), len(text) + 1]) - return embeddings - - def embed_query(self, text: str) -> list[float]: - # Simulate embedding a query - return [5.0, 6.0] - - -@pytest.fixture -def cache_embeddings() -> CacheBackedEmbeddings: - """Create a cache backed embeddings.""" - store = InMemoryStore() - embeddings = MockEmbeddings() - return CacheBackedEmbeddings.from_bytes_store( - embeddings, - store, - namespace="test_namespace", - ) - - -@pytest.fixture -def cache_embeddings_batch() -> CacheBackedEmbeddings: - """Create a cache backed embeddings with a batch_size of 3.""" - store = InMemoryStore() - embeddings = MockEmbeddings() - return CacheBackedEmbeddings.from_bytes_store( - embeddings, - store, - namespace="test_namespace", - batch_size=3, - ) - - -@pytest.fixture -def cache_embeddings_with_query() -> CacheBackedEmbeddings: - """Create a cache backed embeddings with query caching.""" - doc_store = InMemoryStore() - query_store = InMemoryStore() - embeddings = MockEmbeddings() - return CacheBackedEmbeddings.from_bytes_store( - embeddings, - document_embedding_cache=doc_store, - namespace="test_namespace", - query_embedding_cache=query_store, - ) - - -def test_embed_documents(cache_embeddings: CacheBackedEmbeddings) -> None: - texts = ["1", "22", "a", "333"] - vectors = cache_embeddings.embed_documents(texts) - expected_vectors: list[list[float]] = [[1, 2.0], [2.0, 3.0], [1.0, 2.0], [3.0, 4.0]] - assert vectors == expected_vectors - keys = list(cache_embeddings.document_embedding_store.yield_keys()) - assert len(keys) == 4 - # UUID is expected to be the same for the same text - assert keys[0] == "test_namespace812b86c1-8ebf-5483-95c6-c95cf2b52d12" - - -def test_embed_documents_batch(cache_embeddings_batch: CacheBackedEmbeddings) -> None: - # "RAISE_EXCEPTION" forces a failure in batch 2 - texts = ["1", "22", "a", "333", "RAISE_EXCEPTION"] - with contextlib.suppress(ValueError): - cache_embeddings_batch.embed_documents(texts) - keys = list(cache_embeddings_batch.document_embedding_store.yield_keys()) - # only the first batch of three embeddings should exist - assert len(keys) == 3 - # UUID is expected to be the same for the same text - assert keys[0] == "test_namespace812b86c1-8ebf-5483-95c6-c95cf2b52d12" - - -def test_embed_query(cache_embeddings: CacheBackedEmbeddings) -> None: - text = "query_text" - vector = cache_embeddings.embed_query(text) - expected_vector = [5.0, 6.0] - assert vector == expected_vector - assert cache_embeddings.query_embedding_store is None - - -def test_embed_cached_query(cache_embeddings_with_query: CacheBackedEmbeddings) -> None: - text = "query_text" - vector = cache_embeddings_with_query.embed_query(text) - expected_vector = [5.0, 6.0] - assert vector == expected_vector - keys = list(cache_embeddings_with_query.query_embedding_store.yield_keys()) # type: ignore[union-attr] - assert len(keys) == 1 - assert keys[0] == "test_namespace89ec3dae-a4d9-5636-a62e-ff3b56cdfa15" - - -async def test_aembed_documents(cache_embeddings: CacheBackedEmbeddings) -> None: - texts = ["1", "22", "a", "333"] - vectors = await cache_embeddings.aembed_documents(texts) - 
-    expected_vectors: list[list[float]] = [[1, 2.0], [2.0, 3.0], [1.0, 2.0], [3.0, 4.0]]
-    assert vectors == expected_vectors
-    keys = [key async for key in cache_embeddings.document_embedding_store.ayield_keys()]
-    assert len(keys) == 4
-    # UUID is expected to be the same for the same text
-    assert keys[0] == "test_namespace812b86c1-8ebf-5483-95c6-c95cf2b52d12"
-
-
-async def test_aembed_documents_batch(
-    cache_embeddings_batch: CacheBackedEmbeddings,
-) -> None:
-    # "RAISE_EXCEPTION" forces a failure in batch 2
-    texts = ["1", "22", "a", "333", "RAISE_EXCEPTION"]
-    with contextlib.suppress(ValueError):
-        await cache_embeddings_batch.aembed_documents(texts)
-    keys = [key async for key in cache_embeddings_batch.document_embedding_store.ayield_keys()]
-    # only the first batch of three embeddings should exist
-    assert len(keys) == 3
-    # UUID is expected to be the same for the same text
-    assert keys[0] == "test_namespace812b86c1-8ebf-5483-95c6-c95cf2b52d12"
-
-
-async def test_aembed_query(cache_embeddings: CacheBackedEmbeddings) -> None:
-    text = "query_text"
-    vector = await cache_embeddings.aembed_query(text)
-    expected_vector = [5.0, 6.0]
-    assert vector == expected_vector
-
-
-async def test_aembed_query_cached(
-    cache_embeddings_with_query: CacheBackedEmbeddings,
-) -> None:
-    text = "query_text"
-    await cache_embeddings_with_query.aembed_query(text)
-    keys = list(cache_embeddings_with_query.query_embedding_store.yield_keys())  # type: ignore[union-attr]
-    assert len(keys) == 1
-    assert keys[0] == "test_namespace89ec3dae-a4d9-5636-a62e-ff3b56cdfa15"
-
-
-def test_blake2b_encoder() -> None:
-    """Test that the blake2b encoder is used to encode keys in the cache store."""
-    store = InMemoryStore()
-    emb = MockEmbeddings()
-    cbe = CacheBackedEmbeddings.from_bytes_store(
-        emb,
-        store,
-        namespace="ns_",
-        key_encoder="blake2b",
-    )
-
-    text = "blake"
-    cbe.embed_documents([text])
-
-    # rebuild the key exactly as the library does
-    expected_key = "ns_" + hashlib.blake2b(text.encode()).hexdigest()
-    assert list(cbe.document_embedding_store.yield_keys()) == [expected_key]
-
-
-def test_sha256_encoder() -> None:
-    """Test that the sha256 encoder is used to encode keys in the cache store."""
-    store = InMemoryStore()
-    emb = MockEmbeddings()
-    cbe = CacheBackedEmbeddings.from_bytes_store(
-        emb,
-        store,
-        namespace="ns_",
-        key_encoder="sha256",
-    )
-
-    text = "foo"
-    cbe.embed_documents([text])
-
-    # rebuild the key exactly as the library does
-    expected_key = "ns_" + hashlib.sha256(text.encode()).hexdigest()
-    assert list(cbe.document_embedding_store.yield_keys()) == [expected_key]
-
-
-def test_sha512_encoder() -> None:
-    """Test that the sha512 encoder is used to encode keys in the cache store."""
-    store = InMemoryStore()
-    emb = MockEmbeddings()
-    cbe = CacheBackedEmbeddings.from_bytes_store(
-        emb,
-        store,
-        namespace="ns_",
-        key_encoder="sha512",
-    )
-
-    text = "foo"
-    cbe.embed_documents([text])
-
-    # rebuild the key exactly as the library does
-    expected_key = "ns_" + hashlib.sha512(text.encode()).hexdigest()
-    assert list(cbe.document_embedding_store.yield_keys()) == [expected_key]
-
-
-def test_sha1_warning_emitted_once() -> None:
-    """Test that a warning is emitted when using SHA-1 as the default key encoder."""
-    module = importlib.import_module(CacheBackedEmbeddings.__module__)
-
-    # Create a *temporary* MonkeyPatch object whose effects disappear
-    # automatically when the with-block exits.
-    with pytest.MonkeyPatch.context() as mp:
-        # We're monkey patching the module to reset the `_warned_about_sha1` flag
-        # which may have been set while testing other parts of the codebase.
-        mp.setattr(module, "_warned_about_sha1", False, raising=False)
-
-        store = InMemoryStore()
-        emb = MockEmbeddings()
-
-        with warnings.catch_warnings(record=True) as caught:
-            warnings.simplefilter("always")
-            CacheBackedEmbeddings.from_bytes_store(emb, store)  # triggers warning
-            CacheBackedEmbeddings.from_bytes_store(emb, store)  # silent
-
-        sha1_msgs = [w for w in caught if "SHA-1" in str(w.message)]
-        assert len(sha1_msgs) == 1
-
-
-def test_custom_encoder() -> None:
-    """Test that a custom encoder can be used to encode keys in the cache store."""
-    store = InMemoryStore()
-    emb = MockEmbeddings()
-
-    def custom_upper(text: str) -> str:  # very simple demo encoder
-        return "CUSTOM_" + text.upper()
-
-    cbe = CacheBackedEmbeddings.from_bytes_store(emb, store, key_encoder=custom_upper)
-    txt = "x"
-    cbe.embed_documents([txt])
-
-    assert list(cbe.document_embedding_store.yield_keys()) == ["CUSTOM_X"]
diff --git a/libs/langchain_v1/tests/unit_tests/embeddings/test_imports.py b/libs/langchain_v1/tests/unit_tests/embeddings/test_imports.py
index e13af37bea6..80ecff96b98 100644
--- a/libs/langchain_v1/tests/unit_tests/embeddings/test_imports.py
+++ b/libs/langchain_v1/tests/unit_tests/embeddings/test_imports.py
@@ -1,7 +1,6 @@
 from langchain import embeddings
 
 EXPECTED_ALL = [
-    "CacheBackedEmbeddings",
     "Embeddings",
     "init_embeddings",
 ]
diff --git a/libs/langchain_v1/tests/unit_tests/storage/test_imports.py b/libs/langchain_v1/tests/unit_tests/storage/test_imports.py
deleted file mode 100644
index 83315291ac9..00000000000
--- a/libs/langchain_v1/tests/unit_tests/storage/test_imports.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from langchain import storage
-
-EXPECTED_ALL = [
-    "EncoderBackedStore",
-    "InMemoryStore",
-    "InMemoryByteStore",
-    "InvalidKeyException",
-]
-
-
-def test_all_imports() -> None:
-    assert set(storage.__all__) == set(EXPECTED_ALL)
diff --git a/libs/langchain_v1/tests/unit_tests/tools/test_imports.py b/libs/langchain_v1/tests/unit_tests/tools/test_imports.py
index 79c7371e12e..17b66c6f771 100644
--- a/libs/langchain_v1/tests/unit_tests/tools/test_imports.py
+++ b/libs/langchain_v1/tests/unit_tests/tools/test_imports.py
@@ -7,7 +7,7 @@ EXPECTED_ALL = {
     "InjectedToolArg",
     "InjectedToolCallId",
     "ToolException",
-    "ToolNode",
+    "ToolRuntime",
     "tool",
 }
 
diff --git a/libs/langchain_v1/tests/unit_tests/tools/test_on_tool_call.py b/libs/langchain_v1/tests/unit_tests/tools/test_on_tool_call.py
deleted file mode 100644
index f683e7909db..00000000000
--- a/libs/langchain_v1/tests/unit_tests/tools/test_on_tool_call.py
+++ /dev/null
@@ -1,1243 +0,0 @@
-"""Unit tests for tool call interceptor in ToolNode."""
-
-from collections.abc import Callable
-
-import pytest
-from langchain_core.messages import AIMessage, ToolCall, ToolMessage
-from langchain_core.tools import tool
-from langgraph.types import Command
-
-from langchain.tools.tool_node import (
-    ToolCallRequest,
-    ToolNode,
-)
-
-pytestmark = pytest.mark.anyio
-
-
-@tool
-def add(a: int, b: int) -> int:
-    """Add two numbers."""
-    return a + b
-
-
-@tool
-def failing_tool(a: int) -> int:
-    """A tool that always fails."""
-    msg = f"This tool always fails (input: {a})"
-    raise ValueError(msg)
-
-
-@tool
-def command_tool(goto: str) -> Command:
-    """A tool that returns a Command."""
-    return Command(goto=goto)
-
-
-def test_passthrough_handler() -> None:
-
"""Test a simple passthrough handler that doesn't modify anything.""" - - def passthrough_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Simple passthrough handler.""" - return execute(request) - - tool_node = ToolNode([add], on_tool_call=passthrough_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_1", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - assert tool_message.content == "3" - assert tool_message.tool_call_id == "call_1" - assert tool_message.status != "error" - - -async def test_passthrough_handler_async() -> None: - """Test passthrough handler with async tool.""" - - def passthrough_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Simple passthrough handler.""" - return execute(request) - - tool_node = ToolNode([add], on_tool_call=passthrough_handler) - - result = await tool_node.ainvoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 2, "b": 3}, - "id": "call_2", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - assert tool_message.content == "5" - assert tool_message.tool_call_id == "call_2" - - -def test_modify_arguments() -> None: - """Test handler that modifies tool arguments before execution.""" - - def modify_args_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that doubles the input arguments.""" - # Modify the arguments - request.tool_call["args"]["a"] *= 2 - request.tool_call["args"]["b"] *= 2 - - return execute(request) - - tool_node = ToolNode([add], on_tool_call=modify_args_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_3", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - # Original args were (1, 2), doubled to (2, 4), so result is 6 - assert tool_message.content == "6" - - -def test_handler_validation_no_return() -> None: - """Test that handler must return a result.""" - - def handler_with_explicit_none( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that executes and returns result.""" - return execute(request) - - tool_node = ToolNode([add], on_tool_call=handler_with_explicit_none) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_6", - } - ], - ) - ] - } - ) - - assert isinstance(result, dict) - messages = result["messages"] - assert len(messages) == 1 - assert isinstance(messages[0], ToolMessage) - assert messages[0].content == "3" - - -def test_handler_validation_no_yield() -> None: - """Test that handler that doesn't call execute returns None (bad behavior).""" - - def bad_handler( - _request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that doesn't call execute - will cause type error.""" - # Don't call execute, just return None (invalid) - return None # 
type: ignore[return-value] - - tool_node = ToolNode([add], on_tool_call=bad_handler) - - # This will return None wrapped in messages - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_7", - } - ], - ) - ] - } - ) - - # Result contains None in messages (bad handler behavior) - assert isinstance(result, dict) - assert result["messages"][0] is None - - -def test_handler_with_handle_tool_errors_true() -> None: - """Test that handle_tool_errors=True works with on_tool_call handler.""" - - def passthrough_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Simple passthrough handler.""" - message = execute(request) - # When handle_tool_errors=True, errors should be converted to error messages - assert isinstance(message, ToolMessage) - assert message.status == "error" - return message - - tool_node = ToolNode([failing_tool], on_tool_call=passthrough_handler, handle_tool_errors=True) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "failing", - tool_calls=[ - { - "name": "failing_tool", - "args": {"a": 1}, - "id": "call_9", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - assert tool_message.status == "error" - - -def test_multiple_tool_calls_with_handler() -> None: - """Test handler with multiple tool calls in one message.""" - call_count = 0 - - def counting_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that counts calls.""" - nonlocal call_count - call_count += 1 - return execute(request) - - tool_node = ToolNode([add], on_tool_call=counting_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding multiple", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_10", - }, - { - "name": "add", - "args": {"a": 3, "b": 4}, - "id": "call_11", - }, - { - "name": "add", - "args": {"a": 5, "b": 6}, - "id": "call_12", - }, - ], - ) - ] - } - ) - - # Handler should be called once for each tool call - assert call_count == 3 - - # Verify all results - messages = result["messages"] - assert len(messages) == 3 - assert all(isinstance(m, ToolMessage) for m in messages) - assert messages[0].content == "3" - assert messages[1].content == "7" - assert messages[2].content == "11" - - -def test_tool_call_request_dataclass() -> None: - """Test ToolCallRequest dataclass.""" - tool_call: ToolCall = {"name": "add", "args": {"a": 1, "b": 2}, "id": "call_1"} - state: dict = {"messages": []} - runtime = None - - request = ToolCallRequest(tool_call=tool_call, tool=add, state=state, runtime=runtime) - - assert request.tool_call == tool_call - assert request.tool == add - assert request.state == state - assert request.runtime is None - assert request.tool_call["name"] == "add" - - -async def test_handler_with_async_execution() -> None: - """Test handler works correctly with async tool execution.""" - - @tool - async def async_add(a: int, b: int) -> int: - """Async add two numbers.""" - return a + b - - def modifying_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that modifies arguments.""" - # Add 10 to both arguments - request.tool_call["args"]["a"] += 10 - request.tool_call["args"]["b"] += 10 - return execute(request) - 
- tool_node = ToolNode([async_add], on_tool_call=modifying_handler) - - result = await tool_node.ainvoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "async_add", - "args": {"a": 1, "b": 2}, - "id": "call_13", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - # Original: 1 + 2 = 3, with modifications: 11 + 12 = 23 - assert tool_message.content == "23" - - -def test_short_circuit_with_tool_message() -> None: - """Test handler that returns ToolMessage to short-circuit tool execution.""" - - def short_circuit_handler( - request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns cached result without executing tool.""" - # Return a ToolMessage directly instead of calling execute - return ToolMessage( - content="cached_result", - tool_call_id=request.tool_call["id"], - name=request.tool_call["name"], - ) - - tool_node = ToolNode([add], on_tool_call=short_circuit_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_16", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - assert tool_message.content == "cached_result" - assert tool_message.tool_call_id == "call_16" - assert tool_message.name == "add" - - -async def test_short_circuit_with_tool_message_async() -> None: - """Test async handler that returns ToolMessage to short-circuit tool execution.""" - - def short_circuit_handler( - request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns cached result without executing tool.""" - return ToolMessage( - content="async_cached_result", - tool_call_id=request.tool_call["id"], - name=request.tool_call["name"], - ) - - tool_node = ToolNode([add], on_tool_call=short_circuit_handler) - - result = await tool_node.ainvoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 2, "b": 3}, - "id": "call_17", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - assert tool_message.content == "async_cached_result" - assert tool_message.tool_call_id == "call_17" - - -def test_conditional_short_circuit() -> None: - """Test handler that conditionally short-circuits based on request.""" - call_count = {"count": 0} - - def conditional_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that caches even numbers, executes odd.""" - call_count["count"] += 1 - a = request.tool_call["args"]["a"] - - if a % 2 == 0: - # Even: use cached result - return ToolMessage( - content=f"cached_{a}", - tool_call_id=request.tool_call["id"], - name=request.tool_call["name"], - ) - # Odd: execute normally - return execute(request) - - tool_node = ToolNode([add], on_tool_call=conditional_handler) - - # Test with even number (should be cached) - result1 = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 2, "b": 3}, - "id": "call_18", - } - ], - ) - ] - } - ) - - tool_message1 = result1["messages"][-1] - assert tool_message1.content == "cached_2" - - # Test with odd number (should execute) - result2 = tool_node.invoke( - { - "messages": [ - 
AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 3, "b": 4}, - "id": "call_19", - } - ], - ) - ] - } - ) - - tool_message2 = result2["messages"][-1] - assert tool_message2.content == "7" # Actual execution: 3 + 4 - - -def test_direct_return_tool_message() -> None: - """Test handler that returns ToolMessage directly without calling execute.""" - - def direct_return_handler( - request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns ToolMessage directly.""" - # Return ToolMessage directly instead of calling execute - return ToolMessage( - content="direct_return", - tool_call_id=request.tool_call["id"], - name=request.tool_call["name"], - ) - - tool_node = ToolNode([add], on_tool_call=direct_return_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_21", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - assert tool_message.content == "direct_return" - assert tool_message.tool_call_id == "call_21" - assert tool_message.name == "add" - - -async def test_direct_return_tool_message_async() -> None: - """Test async handler that returns ToolMessage directly without calling execute.""" - - def direct_return_handler( - request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns ToolMessage directly.""" - return ToolMessage( - content="async_direct_return", - tool_call_id=request.tool_call["id"], - name=request.tool_call["name"], - ) - - tool_node = ToolNode([add], on_tool_call=direct_return_handler) - - result = await tool_node.ainvoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 2, "b": 3}, - "id": "call_22", - } - ], - ) - ] - } - ) - - tool_message = result["messages"][-1] - assert isinstance(tool_message, ToolMessage) - assert tool_message.content == "async_direct_return" - assert tool_message.tool_call_id == "call_22" - - -def test_conditional_direct_return() -> None: - """Test handler that conditionally returns ToolMessage directly or executes tool.""" - - def conditional_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns cached or executes based on condition.""" - a = request.tool_call["args"]["a"] - - if a == 0: - # Return ToolMessage directly for zero - return ToolMessage( - content="zero_cached", - tool_call_id=request.tool_call["id"], - name=request.tool_call["name"], - ) - # Execute tool normally - return execute(request) - - tool_node = ToolNode([add], on_tool_call=conditional_handler) - - # Test with zero (should return directly) - result1 = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 0, "b": 5}, - "id": "call_23", - } - ], - ) - ] - } - ) - - tool_message1 = result1["messages"][-1] - assert tool_message1.content == "zero_cached" - - # Test with non-zero (should execute) - result2 = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 3, "b": 4}, - "id": "call_24", - } - ], - ) - ] - } - ) - - tool_message2 = result2["messages"][-1] - assert tool_message2.content == "7" # Actual execution: 3 + 4 - - -def 
test_handler_can_throw_exception() -> None: - """Test that a handler can throw an exception to signal error.""" - - def throwing_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that throws an exception after receiving response.""" - response = execute(request) - # Check response and throw if invalid - if isinstance(response, ToolMessage): - msg = "Handler rejected the response" - raise TypeError(msg) - return response - - tool_node = ToolNode([add], on_tool_call=throwing_handler, handle_tool_errors=True) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_exc_1", - } - ], - ) - ] - } - ) - - # Should get error message due to handle_tool_errors=True - messages = result["messages"] - assert len(messages) == 1 - assert isinstance(messages[0], ToolMessage) - assert messages[0].status == "error" - assert "Handler rejected the response" in messages[0].content - - -def test_handler_throw_without_handle_errors() -> None: - """Test that exception propagates when handle_tool_errors=False.""" - - def throwing_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that throws an exception.""" - execute(request) - msg = "Handler error" - raise ValueError(msg) - - tool_node = ToolNode([add], on_tool_call=throwing_handler, handle_tool_errors=False) - - with pytest.raises(ValueError, match="Handler error"): - tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_exc_2", - } - ], - ) - ] - } - ) - - -def test_retry_middleware_with_exception() -> None: - """Test retry middleware pattern that can call execute multiple times.""" - attempt_count = {"count": 0} - - def retry_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that can retry by calling execute multiple times.""" - max_retries = 3 - - for _attempt in range(max_retries): - attempt_count["count"] += 1 - response = execute(request) - - # Simulate checking for retriable errors - # In real use case, would check response.status or content - if isinstance(response, ToolMessage): - # For this test, just succeed immediately - return response - - # If we exhausted retries, return last response - return response - - tool_node = ToolNode([add], on_tool_call=retry_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_exc_3", - } - ], - ) - ] - } - ) - - # Should succeed after 1 attempt - assert attempt_count["count"] == 1 - messages = result["messages"] - assert len(messages) == 1 - assert isinstance(messages[0], ToolMessage) - assert messages[0].content == "3" - - -async def test_async_handler_can_throw_exception() -> None: - """Test that async execution also supports exception throwing.""" - - def throwing_handler( - _request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that throws an exception before calling execute.""" - # Throw exception before executing (to avoid async/await complications) - msg = "Async handler rejected the request" - raise ValueError(msg) - - tool_node = ToolNode([add], 
on_tool_call=throwing_handler, handle_tool_errors=True) - - result = await tool_node.ainvoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_exc_4", - } - ], - ) - ] - } - ) - - # Should get error message due to handle_tool_errors=True - messages = result["messages"] - assert len(messages) == 1 - assert isinstance(messages[0], ToolMessage) - assert messages[0].status == "error" - assert "Async handler rejected the request" in messages[0].content - - -def test_handler_cannot_yield_multiple_tool_messages() -> None: - """Test that handler can only return once (not applicable to handler pattern).""" - # With handler pattern, you can only return once by definition - # This test is no longer relevant - handlers naturally return once - # Keep test for compatibility but with simple passthrough - - def single_return_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns once (as all handlers do).""" - return execute(request) - - tool_node = ToolNode([add], on_tool_call=single_return_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_multi_1", - } - ], - ) - ] - } - ) - - # Should succeed - handlers can only return once - assert isinstance(result, dict) - assert len(result["messages"]) == 1 - - -def test_handler_cannot_yield_request_after_tool_message() -> None: - """Test that handler pattern doesn't allow multiple returns (not applicable).""" - # With handler pattern, you can only return once - # This test is no longer relevant - - def single_return_handler( - request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns cached result.""" - # Return cached result (short-circuit) - return ToolMessage("cached", tool_call_id=request.tool_call["id"], name="add") - - tool_node = ToolNode([add], on_tool_call=single_return_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_confused_1", - } - ], - ) - ] - } - ) - - # Should succeed with cached result - assert isinstance(result, dict) - assert result["messages"][0].content == "cached" - - -def test_handler_can_short_circuit_with_command() -> None: - """Test that handler can short-circuit by returning Command.""" - - def command_handler( - _request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that short-circuits with Command.""" - # Short-circuit with Command instead of executing tool - return Command(goto="end") - - tool_node = ToolNode([add], on_tool_call=command_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_cmd_1", - } - ], - ) - ] - } - ) - - # Should get Command in result list - assert isinstance(result, list) - assert len(result) == 1 - assert isinstance(result[0], Command) - assert result[0].goto == "end" - - -def test_handler_cannot_yield_multiple_commands() -> None: - """Test that handler can only return once (not applicable to handler pattern).""" - # With handler pattern, you can only return once - # This test is no longer relevant - - def single_command_handler( - _request: 
ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns Command once.""" - return Command(goto="step1") - - tool_node = ToolNode([add], on_tool_call=single_command_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_multicmd_1", - } - ], - ) - ] - } - ) - - # Should succeed - handlers naturally return once - assert isinstance(result, list) - assert len(result) == 1 - assert isinstance(result[0], Command) - assert result[0].goto == "step1" - - -def test_handler_cannot_yield_request_after_command() -> None: - """Test that handler can only return once (not applicable to handler pattern).""" - # With handler pattern, you can only return once - # This test is no longer relevant - - def command_handler( - _request: ToolCallRequest, - _execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that returns Command.""" - return Command(goto="somewhere") - - tool_node = ToolNode([add], on_tool_call=command_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "adding", - tool_calls=[ - { - "name": "add", - "args": {"a": 1, "b": 2}, - "id": "call_cmdreq_1", - } - ], - ) - ] - } - ) - - # Should succeed with Command - assert isinstance(result, list) - assert len(result) == 1 - assert isinstance(result[0], Command) - assert result[0].goto == "somewhere" - - -def test_tool_returning_command_sent_to_handler() -> None: - """Test that when tool returns Command, it's sent to handler.""" - received_commands = [] - - def command_inspector_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that inspects Command returned by tool.""" - result = execute(request) - # Should receive Command from tool - if isinstance(result, Command): - received_commands.append(result) - return result - - tool_node = ToolNode([command_tool], on_tool_call=command_inspector_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "navigating", - tool_calls=[ - { - "name": "command_tool", - "args": {"goto": "next_step"}, - "id": "call_cmdtool_1", - } - ], - ) - ] - } - ) - - # Handler should have received the Command - assert len(received_commands) == 1 - assert received_commands[0].goto == "next_step" - - # Final result should be the Command in result list - assert isinstance(result, list) - assert len(result) == 1 - assert isinstance(result[0], Command) - assert result[0].goto == "next_step" - - -def test_handler_can_modify_command_from_tool() -> None: - """Test that handler can inspect and modify Command from tool.""" - - def command_modifier_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that modifies Command returned by tool.""" - result = execute(request) - # Modify the Command - if isinstance(result, Command): - return Command(goto=f"modified_{result.goto}") - return result - - tool_node = ToolNode([command_tool], on_tool_call=command_modifier_handler) - - result = tool_node.invoke( - { - "messages": [ - AIMessage( - "navigating", - tool_calls=[ - { - "name": "command_tool", - "args": {"goto": "original"}, - "id": "call_cmdmod_1", - } - ], - ) - ] - } - ) - - # Final result should be the modified Command in result list - assert isinstance(result, list) - assert len(result) 
== 1 - assert isinstance(result[0], Command) - assert result[0].goto == "modified_original" - - -def test_state_extraction_with_dict_input() -> None: - """Test that state is correctly passed when input is a dict.""" - state_seen = [] - - def state_inspector_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that records the state it receives.""" - state_seen.append(request.state) - return execute(request) - - tool_node = ToolNode([add], on_tool_call=state_inspector_handler) - - input_state = { - "messages": [ - AIMessage( - "test", - tool_calls=[{"name": "add", "args": {"a": 1, "b": 2}, "id": "call_1"}], - ) - ], - "other_field": "value", - } - - tool_node.invoke(input_state) - - # State should be the dict we passed in - assert len(state_seen) == 1 - assert state_seen[0] == input_state - assert isinstance(state_seen[0], dict) - assert "messages" in state_seen[0] - assert "other_field" in state_seen[0] - assert "__type" not in state_seen[0] - - -def test_state_extraction_with_list_input() -> None: - """Test that state is correctly passed when input is a list.""" - state_seen = [] - - def state_inspector_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that records the state it receives.""" - state_seen.append(request.state) - return execute(request) - - tool_node = ToolNode([add], on_tool_call=state_inspector_handler) - - input_state = [ - AIMessage( - "test", - tool_calls=[{"name": "add", "args": {"a": 1, "b": 2}, "id": "call_1"}], - ) - ] - - tool_node.invoke(input_state) - - # State should be the list we passed in - assert len(state_seen) == 1 - assert state_seen[0] == input_state - assert isinstance(state_seen[0], list) - - -def test_state_extraction_with_tool_call_with_context() -> None: - """Test that state is correctly extracted from ToolCallWithContext. - - This tests the scenario where ToolNode is invoked via the Send API in - create_agent, which wraps the tool call with additional context including - the graph state. 
- """ - state_seen = [] - - def state_inspector_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that records the state it receives.""" - state_seen.append(request.state) - return execute(request) - - tool_node = ToolNode([add], on_tool_call=state_inspector_handler) - - # Simulate ToolCallWithContext as used by create_agent with Send API - actual_state = { - "messages": [AIMessage("test")], - "thread_model_call_count": 1, - "run_model_call_count": 1, - "custom_field": "custom_value", - } - - tool_call_with_context = { - "__type": "tool_call_with_context", - "tool_call": {"name": "add", "args": {"a": 1, "b": 2}, "id": "call_1", "type": "tool_call"}, - "state": actual_state, - } - - tool_node.invoke(tool_call_with_context) - - # State should be the extracted state from ToolCallWithContext, not the wrapper - assert len(state_seen) == 1 - assert state_seen[0] == actual_state - assert isinstance(state_seen[0], dict) - assert "messages" in state_seen[0] - assert "thread_model_call_count" in state_seen[0] - assert "custom_field" in state_seen[0] - # Most importantly, __type should NOT be in the extracted state - assert "__type" not in state_seen[0] - # And tool_call should not be in the state - assert "tool_call" not in state_seen[0] - - -async def test_state_extraction_with_tool_call_with_context_async() -> None: - """Test that state is correctly extracted from ToolCallWithContext in async mode.""" - state_seen = [] - - def state_inspector_handler( - request: ToolCallRequest, - execute: Callable[[ToolCallRequest], ToolMessage | Command], - ) -> ToolMessage | Command: - """Handler that records the state it receives.""" - state_seen.append(request.state) - return execute(request) - - tool_node = ToolNode([add], on_tool_call=state_inspector_handler) - - # Simulate ToolCallWithContext as used by create_agent with Send API - actual_state = { - "messages": [AIMessage("test")], - "thread_model_call_count": 1, - "run_model_call_count": 1, - } - - tool_call_with_context = { - "__type": "tool_call_with_context", - "tool_call": {"name": "add", "args": {"a": 1, "b": 2}, "id": "call_1", "type": "tool_call"}, - "state": actual_state, - } - - await tool_node.ainvoke(tool_call_with_context) - - # State should be the extracted state from ToolCallWithContext - assert len(state_seen) == 1 - assert state_seen[0] == actual_state - assert "__type" not in state_seen[0] - assert "tool_call" not in state_seen[0] diff --git a/libs/langchain_v1/uv.lock b/libs/langchain_v1/uv.lock index 0242907e69f..8dd3cacdba7 100644 --- a/libs/langchain_v1/uv.lock +++ b/libs/langchain_v1/uv.lock @@ -193,6 +193,149 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, ] +[[package]] +name = "azure-ai-agents" +version = "1.2.0b6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/32/f4e534dc05dfb714705df56a190d690c5452cd4dd7e936612cb1adddc44f/azure_ai_agents-1.2.0b6.tar.gz", hash = "sha256:d3c10848c3b19dec98a292f8c10cee4ba4aac1050d4faabf9c2e2456b727f528", size = 396865, upload-time = "2025-10-24T18:04:47.877Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/96/d0/930c522f5fa9da163de057e57f8b44539424e13f46618c52624ebc712293/azure_ai_agents-1.2.0b6-py3-none-any.whl", hash = "sha256:ce23ad8fb9791118905be1ec8eae5c907cca2e536a455f1d3b830062c72cf2a7", size = 217950, upload-time = "2025-10-24T18:04:49.72Z" }, +] + +[[package]] +name = "azure-ai-inference" +version = "1.0.0b9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/6a/ed85592e5c64e08c291992f58b1a94dab6869f28fb0f40fd753dced73ba6/azure_ai_inference-1.0.0b9.tar.gz", hash = "sha256:1feb496bd84b01ee2691befc04358fa25d7c344d8288e99364438859ad7cd5a4", size = 182408, upload-time = "2025-02-15T00:37:28.464Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/0f/27520da74769db6e58327d96c98e7b9a07ce686dff582c9a5ec60b03f9dd/azure_ai_inference-1.0.0b9-py3-none-any.whl", hash = "sha256:49823732e674092dad83bb8b0d1b65aa73111fab924d61349eb2a8cdc0493990", size = 124885, upload-time = "2025-02-15T00:37:29.964Z" }, +] + +[package.optional-dependencies] +opentelemetry = [ + { name = "azure-core-tracing-opentelemetry" }, +] + +[[package]] +name = "azure-ai-projects" +version = "1.1.0b4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-ai-agents" }, + { name = "azure-core" }, + { name = "azure-storage-blob" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/16/7a7c978a79f545d62ab4327cd704c22b5d7ade8dcfb58ea193257aebabf9/azure_ai_projects-1.1.0b4.tar.gz", hash = "sha256:39e2f1396270b375069c2d9c82ccfe91c11384eca9f61d59adbc12fb6d6a32ca", size = 147568, upload-time = "2025-09-12T17:35:08.52Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/10/8b7bd070e3cc804343dab124ce66a3b7999a72d5be0e49232cbcd1d36e18/azure_ai_projects-1.1.0b4-py3-none-any.whl", hash = "sha256:d8aab84fd7cd7c5937e78141e37ca4473dc5ed6cce2c0490c634418abe14afea", size = 126670, upload-time = "2025-09-12T17:35:10.039Z" }, +] + +[[package]] +name = "azure-common" +version = "1.1.28" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/71/f6f71a276e2e69264a97ad39ef850dca0a04fce67b12570730cb38d0ccac/azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3", size = 20914, upload-time = "2022-02-03T19:39:44.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/55/7f118b9c1b23ec15ca05d15a578d8207aa1706bc6f7c87218efffbbf875d/azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad", size = 14462, upload-time = "2022-02-03T19:39:42.417Z" }, +] + +[[package]] +name = "azure-core" +version = "1.36.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/c4/d4ff3bc3ddf155156460bff340bbe9533f99fac54ddea165f35a8619f162/azure_core-1.36.0.tar.gz", hash = "sha256:22e5605e6d0bf1d229726af56d9e92bc37b6e726b141a18be0b4d424131741b7", size = 351139, upload-time = "2025-10-15T00:33:49.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/3c/b90d5afc2e47c4a45f4bba00f9c3193b0417fad5ad3bb07869f9d12832aa/azure_core-1.36.0-py3-none-any.whl", hash = 
"sha256:fee9923a3a753e94a259563429f3644aaf05c486d45b1215d098115102d91d3b", size = 213302, upload-time = "2025-10-15T00:33:51.058Z" }, +] + +[[package]] +name = "azure-core-tracing-opentelemetry" +version = "1.0.0b12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "opentelemetry-api" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/7f/5de13a331a5f2919417819cc37dcf7c897018f02f83aa82b733e6629a6a6/azure_core_tracing_opentelemetry-1.0.0b12.tar.gz", hash = "sha256:bb454142440bae11fd9d68c7c1d67ae38a1756ce808c5e4d736730a7b4b04144", size = 26010, upload-time = "2025-03-21T00:18:37.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/5e/97a471f66935e7f89f521d0e11ae49c7f0871ca38f5c319dccae2155c8d8/azure_core_tracing_opentelemetry-1.0.0b12-py3-none-any.whl", hash = "sha256:38fd42709f1cc4bbc4f2797008b1c30a6a01617e49910c05daa3a0d0c65053ac", size = 11962, upload-time = "2025-03-21T00:18:38.581Z" }, +] + +[[package]] +name = "azure-cosmos" +version = "4.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/ea/d1818eed2915c4b67d3ddfb67bf661784456c9c4eedd5c7619f08fcef33d/azure_cosmos-4.14.0.tar.gz", hash = "sha256:3cc7ca6a68b87e4da18f9e9b07a4a9bb03ddf015b4ed1f48f7fe140e6d6689b0", size = 2013062, upload-time = "2025-10-13T21:08:49.959Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/1c/874b99c5c00f3ed658c334ab51af34510e4cbc5a783bf62e983fbf24e127/azure_cosmos-4.14.0-py3-none-any.whl", hash = "sha256:9d659e9be3d13b95c639f7fbae6b159cb62025d16aa17e1a4171077986c28a58", size = 385868, upload-time = "2025-10-13T21:08:52.115Z" }, +] + +[[package]] +name = "azure-identity" +version = "1.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "cryptography" }, + { name = "msal" }, + { name = "msal-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/8d/1a6c41c28a37eab26dc85ab6c86992c700cd3f4a597d9ed174b0e9c69489/azure_identity-1.25.1.tar.gz", hash = "sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456", size = 279826, upload-time = "2025-10-06T20:30:02.194Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = "sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317, upload-time = "2025-10-06T20:30:04.251Z" }, +] + +[[package]] +name = "azure-search-documents" +version = "11.7.0b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-common" }, + { name = "azure-core" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/9a/819cb9375e431eac8f337777526a88e5f8e10cbf2efd877d39c470f49aa2/azure_search_documents-11.7.0b1.tar.gz", hash = "sha256:0324bd6732dd79c2bc4b6f6429d5a1b96129077d4706620af6f74af4b952fe0c", size = 395057, upload-time = "2025-09-04T23:30:32.571Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/14/8e9bc312fffc21e5220c8ce87f064009e7f577fef69bade2a9713032b646/azure_search_documents-11.7.0b1-py3-none-any.whl", hash = "sha256:aae83b501246438eff77aa3930de9fdd3532c0d219018342fd9d85c682814376", size = 413982, upload-time = 
"2025-09-04T23:30:34.252Z" }, +] + +[[package]] +name = "azure-storage-blob" +version = "12.27.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "cryptography" }, + { name = "isodate" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/7c/2fd872e11a88163f208b9c92de273bf64bb22d0eef9048cc6284d128a77a/azure_storage_blob-12.27.1.tar.gz", hash = "sha256:a1596cc4daf5dac9be115fcb5db67245eae894cf40e4248243754261f7b674a6", size = 597579, upload-time = "2025-10-29T12:27:16.185Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/9e/1c90a122ea6180e8c72eb7294adc92531b0e08eb3d2324c2ba70d37f4802/azure_storage_blob-12.27.1-py3-none-any.whl", hash = "sha256:65d1e25a4628b7b6acd20ff7902d8da5b4fde8e46e19c8f6d213a3abc3ece272", size = 428954, upload-time = "2025-10-29T12:27:18.072Z" }, +] + [[package]] name = "backports-asyncio-runner" version = "1.2.0" @@ -680,6 +823,71 @@ toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = 
"sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", 
hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", 
hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cd/1a8633802d766a0fa46f382a77e096d7e209e0817892929655fe0586ae32/cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32", size = 3689163, upload-time = "2025-10-15T23:18:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/4c/59/6b26512964ace6480c3e54681a9859c974172fb141c38df11eadd8416947/cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c", size = 3429474, upload-time = "2025-10-15T23:18:15.477Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + [[package]] name = "dataclasses-json" version = "0.6.7" @@ -1123,6 +1331,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d", size = 584358, upload-time = "2025-08-07T13:18:23.708Z" }, { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5", size = 1113550, upload-time = "2025-08-07T13:42:37.467Z" }, { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f", size = 1137126, upload-time = "2025-08-07T13:18:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/f1/29/74242b7d72385e29bcc5563fba67dad94943d7cd03552bac320d597f29b2/greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7", size = 1544904, upload-time = "2025-11-04T12:42:04.763Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e2/1572b8eeab0f77df5f6729d6ab6b141e4a84ee8eb9bc8c1e7918f94eda6d/greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8", size = 1611228, upload-time = "2025-11-04T12:42:08.423Z" }, { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" }, { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, { url = 
"https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, @@ -1132,6 +1342,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, @@ -1141,6 +1353,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = 
"2025-08-07T13:42:39.858Z" }, { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, @@ -1150,6 +1364,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, { url = 
"https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, @@ -1157,6 +1373,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" }, { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, ] @@ -1379,22 +1597,21 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.0.0rc2" +version = "0.35.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, { name = "fsspec" }, { name = "hf-xet", marker = "platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, - { name = "httpx" }, { name = "packaging" }, { name = "pyyaml" }, + { name = "requests" }, { name = "tqdm" }, - { name = "typer-slim" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/d5/e1/987c56200ab835d3a46ac7a26690ee1d6a90326c7759d5b8cfad24e662c4/huggingface_hub-1.0.0rc2.tar.gz", hash = "sha256:d9bbfbab9a6e882bbb38cc6e24ef896de1f2a727a71bdd0f01212e4a1187be71", size = 433269, upload-time = "2025-09-25T15:03:20.387Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hash = "sha256:350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a", size = 461798, upload-time = "2025-09-29T14:29:58.625Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/99/36f0611b2f1ff253cb9a786cd33199e5f75da2bf30276ab461ec439bc538/huggingface_hub-1.0.0rc2-py3-none-any.whl", hash = "sha256:d7fc9d745b5a539d5bf6bcf016e0e36b5db42990476b1d0bb993afdf82c2170e", size = 528661, upload-time = "2025-09-25T15:03:18.295Z" }, + { url = "https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl", hash = "sha256:0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba", size = 564262, upload-time = "2025-09-29T14:29:55.813Z" }, ] [[package]] @@ -1415,6 +1632,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" @@ -1424,6 +1653,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] +[[package]] +name = "isodate" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, +] + [[package]] name = "isort" version = "5.13.2" @@ -1550,7 +1788,7 @@ wheels = [ [[package]] name = "langchain" -version = "1.0.0a12" +version = "1.0.4" source = { editable = "." 
} dependencies = [ { name = "langchain-core" }, @@ -1565,6 +1803,9 @@ anthropic = [ aws = [ { name = "langchain-aws" }, ] +azure-ai = [ + { name = "langchain-azure-ai" }, +] community = [ { name = "langchain-community" }, ] @@ -1583,9 +1824,15 @@ google-vertexai = [ groq = [ { name = "langchain-groq" }, ] +huggingface = [ + { name = "langchain-huggingface" }, +] mistralai = [ { name = "langchain-mistralai" }, ] +model-profiles = [ + { name = "langchain-model-profiles" }, +] ollama = [ { name = "langchain-ollama" }, ] @@ -1609,7 +1856,6 @@ lint = [ test = [ { name = "langchain-openai" }, { name = "langchain-tests" }, - { name = "langchain-text-splitters" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, @@ -1638,6 +1884,7 @@ typing = [ requires-dist = [ { name = "langchain-anthropic", marker = "extra == 'anthropic'" }, { name = "langchain-aws", marker = "extra == 'aws'" }, + { name = "langchain-azure-ai", marker = "extra == 'azure-ai'" }, { name = "langchain-community", marker = "extra == 'community'" }, { name = "langchain-core", editable = "../core" }, { name = "langchain-deepseek", marker = "extra == 'deepseek'" }, @@ -1645,23 +1892,24 @@ requires-dist = [ { name = "langchain-google-genai", marker = "extra == 'google-genai'" }, { name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" }, { name = "langchain-groq", marker = "extra == 'groq'" }, + { name = "langchain-huggingface", marker = "extra == 'huggingface'" }, { name = "langchain-mistralai", marker = "extra == 'mistralai'" }, + { name = "langchain-model-profiles", marker = "extra == 'model-profiles'" }, { name = "langchain-ollama", marker = "extra == 'ollama'" }, { name = "langchain-openai", marker = "extra == 'openai'", editable = "../partners/openai" }, { name = "langchain-perplexity", marker = "extra == 'perplexity'" }, { name = "langchain-together", marker = "extra == 'together'" }, { name = "langchain-xai", marker = "extra == 'xai'" }, - { name = "langgraph", specifier = ">=1.0.0a4,<2.0.0" }, + { name = "langgraph", specifier = ">=1.0.2,<1.1.0" }, { name = "pydantic", specifier = ">=2.7.4,<3.0.0" }, ] -provides-extras = ["community", "anthropic", "openai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "groq", "aws", "deepseek", "xai", "perplexity"] +provides-extras = ["model-profiles", "community", "anthropic", "openai", "azure-ai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"] [package.metadata.requires-dev] lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13.0" }] test = [ { name = "langchain-openai", editable = "../partners/openai" }, { name = "langchain-tests", editable = "../standard-tests" }, - { name = "langchain-text-splitters", editable = "../text-splitters" }, { name = "pytest", specifier = ">=8.0.0,<9.0.0" }, { name = "pytest-asyncio", specifier = ">=0.23.2,<2.0.0" }, { name = "pytest-cov", specifier = ">=4.0.0,<8.0.0" }, @@ -1716,6 +1964,30 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9d/7b/be49a224fe3aa07ed869801356f06e1d7a321bb7f22b6f7935dce86d258a/langchain_aws-1.0.0a1-py3-none-any.whl", hash = "sha256:24207d05c619ea61dfeab0a0f7086ae388cc3f2f5c03a8ae56b12d1b77d72585", size = 146839, upload-time = "2025-09-18T20:52:35.013Z" }, ] +[[package]] +name = "langchain-azure-ai" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "azure-ai-agents" }, + { 
name = "azure-ai-inference", extra = ["opentelemetry"] }, + { name = "azure-ai-projects" }, + { name = "azure-core" }, + { name = "azure-cosmos" }, + { name = "azure-identity" }, + { name = "azure-search-documents" }, + { name = "langchain" }, + { name = "langchain-openai" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, + { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/31/2b746d19d105c5db124bafe25e37ea548918519598f2ccf746413aeb627f/langchain_azure_ai-1.0.2.tar.gz", hash = "sha256:7ec7e46aee03967fb16a222ab48856d9a585a1b24b3eafbdc9ada96611632e07", size = 79774, upload-time = "2025-10-31T22:29:25.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/4a/5150b7f108f89857fe7ce2727f8a8f915298071983f1769e4cc76d1fd26c/langchain_azure_ai-1.0.2-py3-none-any.whl", hash = "sha256:cf5e746b59553232c0b6011f6591ab9fdcb69d310d64e3867847983502a29387", size = 96244, upload-time = "2025-10-31T22:29:24.089Z" }, +] + [[package]] name = "langchain-community" version = "0.3.30" @@ -1742,7 +2014,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.3" source = { editable = "../core" } dependencies = [ { name = "jsonpatch" }, @@ -1776,6 +2048,7 @@ test = [ { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" }, { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, { name = "grandalf", specifier = ">=0.8.0,<1.0.0" }, + { name = "langchain-model-profiles", directory = "../model-profiles" }, { name = "langchain-tests", directory = "../standard-tests" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, @@ -1792,6 +2065,7 @@ test = [ ] test-integration = [] typing = [ + { name = "langchain-model-profiles", directory = "../model-profiles" }, { name = "langchain-text-splitters", directory = "../text-splitters" }, { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, @@ -1876,6 +2150,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d7/7c/3bbe58c4da4fd2e86ac35d7305027e1fe9cba49e0d7a9271ae90ea21b47a/langchain_groq-1.0.0a1-py3-none-any.whl", hash = "sha256:2067e25f2be394a5cde6270d047757c2feb622f9f419d704778c355ce8d9d084", size = 17516, upload-time = "2025-10-02T23:21:33.892Z" }, ] +[[package]] +name = "langchain-huggingface" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, + { name = "langchain-core" }, + { name = "tokenizers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/0d/2a22659534cc70410573a53d32756cf7971de7923ba2ccf940c03ecbe12a/langchain_huggingface-1.0.0.tar.gz", hash = "sha256:0d2eb924ff77dc08bb7dd340ab10d47c1b71372e4297bc12e84c4a66df9a4414", size = 247750, upload-time = "2025-10-17T15:30:35.179Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/30/7476a926e31498ebc4be3131e06650c5136ffb8ba12067f56212e187dd7c/langchain_huggingface-1.0.0-py3-none-any.whl", hash = "sha256:06d6ac57951c6a1c47d329c38f2b32472a839eab2fa14883be784916ea075da0", size = 27493, upload-time = "2025-10-17T15:30:34.023Z" }, +] + [[package]] name = "langchain-mistralai" version = "0.2.12" @@ -1892,6 +2180,19 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/64/fe/a4bf7240beb12ebaf9f1780938ec4402b40e7fa5ffcedc7c25473c2078ed/langchain_mistralai-0.2.12-py3-none-any.whl", hash = "sha256:64a85947776017eec787b586f4bfa092d237c5e95a9ed719b5ff22a81747dedf", size = 16695, upload-time = "2025-09-18T15:47:39.591Z" }, ] +[[package]] +name = "langchain-model-profiles" +version = "0.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/67/b38584d603a20fa286bf5939ddc54ba4ca981269a2d68fd56911a9c01ae1/langchain_model_profiles-0.0.2.tar.gz", hash = "sha256:3ed302e35ffa5fe0b9e3480d8e7c7ea98131bf4ebf3568e800e97b047159d58a", size = 145439, upload-time = "2025-10-31T18:00:29.698Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/fd/3cc2ea700d0331e0edb066789b74498262d405a45816290e890337796cee/langchain_model_profiles-0.0.2-py3-none-any.whl", hash = "sha256:c2db038573de419195d1fd6aa54d9a8f3ec6b951619950b755ffbc59cdc9f666", size = 30018, upload-time = "2025-10-31T18:00:27.921Z" }, +] + [[package]] name = "langchain-ollama" version = "1.0.0a1" @@ -1907,7 +2208,7 @@ wheels = [ [[package]] name = "langchain-openai" -version = "1.0.0a4" +version = "1.0.2" source = { editable = "../partners/openai" } dependencies = [ { name = "langchain-core" }, @@ -1927,6 +2228,7 @@ dev = [{ name = "langchain-core", editable = "../core" }] lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }] test = [ { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, + { name = "langchain", editable = "." }, { name = "langchain-core", editable = "../core" }, { name = "langchain-tests", editable = "../standard-tests" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, @@ -1946,7 +2248,7 @@ test-integration = [ { name = "httpx", specifier = ">=0.27.0,<1.0.0" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, - { name = "pillow", specifier = ">=10.3.0,<11.0.0" }, + { name = "pillow", specifier = ">=10.3.0,<12.0.0" }, ] typing = [ { name = "langchain-core", editable = "../core" }, @@ -1969,7 +2271,7 @@ wheels = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.1" source = { editable = "../standard-tests" } dependencies = [ { name = "httpx" }, @@ -2014,7 +2316,7 @@ typing = [ [[package]] name = "langchain-text-splitters" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "../text-splitters" } dependencies = [ { name = "langchain-core" }, @@ -2047,8 +2349,8 @@ test-integration = [ { name = "nltk", specifier = ">=3.9.1,<4.0.0" }, { name = "scipy", marker = "python_full_version == '3.12.*'", specifier = ">=1.7.0,<2.0.0" }, { name = "scipy", marker = "python_full_version >= '3.13'", specifier = ">=1.14.1,<2.0.0" }, - { name = "sentence-transformers", specifier = ">=3.0.1,<4.0.0" }, - { name = "spacy", specifier = ">=3.8.7,<4.0.0" }, + { name = "sentence-transformers", marker = "python_full_version < '3.14'", specifier = ">=3.0.1,<4.0.0" }, + { name = "spacy", marker = "python_full_version < '3.14'", specifier = ">=3.8.7,<4.0.0" }, { name = "thinc", specifier = ">=8.3.6,<9.0.0" }, { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" }, { name = "transformers", specifier = ">=4.51.3,<5.0.0" }, @@ -2107,7 +2409,7 @@ wheels = [ [[package]] name = "langgraph" -version = "1.0.0a4" 
+version = "1.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, @@ -2117,35 +2419,35 @@ dependencies = [ { name = "pydantic" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/9e/fcf7d2dd275654a077c224c38e33674b1e5753146b7c7a228f3074767c3c/langgraph-1.0.0a4.tar.gz", hash = "sha256:7263648049967d4e2c15c6c94532c4398a05ef785ad09bd50cb2821f137ad0b9", size = 465213, upload-time = "2025-09-29T12:13:43.729Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/25/18e6e056ee1a8af64fcab441b4a3f2e158399935b08f148c7718fc42ecdb/langgraph-1.0.2.tar.gz", hash = "sha256:dae1af08d6025cb1fcaed68f502c01af7d634d9044787c853a46c791cfc52f67", size = 482660, upload-time = "2025-10-29T18:38:28.374Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/f1/6d19322b63840290d95defaf9787014455e8aa8394dcb67bcdd57a6f72fa/langgraph-1.0.0a4-py3-none-any.whl", hash = "sha256:91d26fd0f8045c3008673858c79d8aae7821cdef269443946d192719416c0e49", size = 154928, upload-time = "2025-09-29T12:13:42.154Z" }, + { url = "https://files.pythonhosted.org/packages/d7/b1/9f4912e13d4ed691f2685c8a4b764b5a9237a30cca0c5782bc213d9f0a9a/langgraph-1.0.2-py3-none-any.whl", hash = "sha256:b3d56b8c01de857b5fb1da107e8eab6e30512a377685eeedb4f76456724c9729", size = 156751, upload-time = "2025-10-29T18:38:26.577Z" }, ] [[package]] name = "langgraph-checkpoint" -version = "2.1.1" +version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "ormsgpack" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/73/3e/d00eb2b56c3846a0cabd2e5aa71c17a95f882d4f799a6ffe96a19b55eba9/langgraph_checkpoint-2.1.1.tar.gz", hash = "sha256:72038c0f9e22260cb9bff1f3ebe5eb06d940b7ee5c1e4765019269d4f21cf92d", size = 136256, upload-time = "2025-07-17T13:07:52.411Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/cb/2a6dad2f0a14317580cc122e2a60e7f0ecabb50aaa6dc5b7a6a2c94cead7/langgraph_checkpoint-3.0.0.tar.gz", hash = "sha256:f738695ad938878d8f4775d907d9629e9fcd345b1950196effb08f088c52369e", size = 132132, upload-time = "2025-10-20T18:35:49.132Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/dd/64686797b0927fb18b290044be12ae9d4df01670dce6bb2498d5ab65cb24/langgraph_checkpoint-2.1.1-py3-none-any.whl", hash = "sha256:5a779134fd28134a9a83d078be4450bbf0e0c79fdf5e992549658899e6fc5ea7", size = 43925, upload-time = "2025-07-17T13:07:51.023Z" }, + { url = "https://files.pythonhosted.org/packages/85/2a/2efe0b5a72c41e3a936c81c5f5d8693987a1b260287ff1bbebaae1b7b888/langgraph_checkpoint-3.0.0-py3-none-any.whl", hash = "sha256:560beb83e629784ab689212a3d60834fb3196b4bbe1d6ac18e5cad5d85d46010", size = 46060, upload-time = "2025-10-20T18:35:48.255Z" }, ] [[package]] name = "langgraph-prebuilt" -version = "0.7.0a2" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "langgraph-checkpoint" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/a2/8c82bad7400328a10953e52355933a9e79778fbb7bc3389be6240be541af/langgraph_prebuilt-0.7.0a2.tar.gz", hash = "sha256:ecf154a68be5eb3316544c2df47a19e4cc0e2ce1e2bbd971ba28533695fa9ddc", size = 113658, upload-time = "2025-09-02T17:07:02.547Z" } +sdist = { url = "https://files.pythonhosted.org/packages/33/2f/b940590436e07b3450fe6d791aad5e581363ad536c4f1771e3ba46530268/langgraph_prebuilt-1.0.2.tar.gz", hash = 
"sha256:9896dbabf04f086eb59df4294f54ab5bdb21cd78e27e0a10e695dffd1cc6097d", size = 142075, upload-time = "2025-10-29T18:29:00.401Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/b9/e59ecfa7cac69fdcfa1274a7a575de64ba0351da30cf35be9dcb7f3b33c7/langgraph_prebuilt-0.7.0a2-py3-none-any.whl", hash = "sha256:757b93a3e44802ba18623bdca46384fae109736758496a83b043ce4b5074bc47", size = 28398, upload-time = "2025-09-02T17:07:01.633Z" }, + { url = "https://files.pythonhosted.org/packages/27/2f/9a7d00d4afa036e65294059c7c912002fb72ba5dbbd5c2a871ca06360278/langgraph_prebuilt-1.0.2-py3-none-any.whl", hash = "sha256:d9499f7c449fb637ee7b87e3f6a3b74095f4202053c74d33894bd839ea4c57c7", size = 34286, upload-time = "2025-10-29T18:28:59.26Z" }, ] [[package]] @@ -2409,6 +2711,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/fc/0e61d9a4e29c8679356795a40e48f647b4aad58d71bfc969f0f8f56fb912/mmh3-5.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e7884931fe5e788163e7b3c511614130c2c59feffdc21112290a194487efb2e9", size = 40455, upload-time = "2025-07-29T07:43:29.563Z" }, ] +[[package]] +name = "msal" +version = "1.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, +] + +[[package]] +name = "msal-extensions" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, +] + [[package]] name = "multidict" version = "6.6.4" @@ -2790,6 +3118,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/83/88f64fc8f037885efa8a629d1215f5bc1f037453bab4d4f823b5533319eb/openai-2.1.0-py3-none-any.whl", hash = "sha256:33172e8c06a4576144ba4137a493807a9ca427421dcabc54ad3aa656daf757d3", size = 964939, upload-time = "2025-10-02T20:43:13.568Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.38.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/d8/0f354c375628e048bd0570645b310797299754730079853095bf000fba69/opentelemetry_api-1.38.0.tar.gz", hash = "sha256:f4c193b5e8acb0912b06ac5b16321908dd0843d75049c091487322284a3eea12", size = 65242, upload-time = "2025-10-16T08:35:50.25Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/ae/a2/d86e01c28300bd41bab8f18afd613676e2bd63515417b77636fc1add426f/opentelemetry_api-1.38.0-py3-none-any.whl", hash = "sha256:2891b0197f47124454ab9f0cf58f3be33faca394457ac3e09daba13ff50aa582", size = 65947, upload-time = "2025-10-16T08:35:30.23Z" }, +] + [[package]] name = "orjson" version = "3.11.3" @@ -2869,42 +3210,57 @@ wheels = [ [[package]] name = "ormsgpack" -version = "1.10.0" +version = "1.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/36/44eed5ef8ce93cded76a576780bab16425ce7876f10d3e2e6265e46c21ea/ormsgpack-1.10.0.tar.gz", hash = "sha256:7f7a27efd67ef22d7182ec3b7fa7e9d147c3ad9be2a24656b23c989077e08b16", size = 58629, upload-time = "2025-05-24T19:07:53.944Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/f8/224c342c0e03e131aaa1a1f19aa2244e167001783a433f4eed10eedd834b/ormsgpack-1.11.0.tar.gz", hash = "sha256:7c9988e78fedba3292541eb3bb274fa63044ef4da2ddb47259ea70c05dee4206", size = 49357, upload-time = "2025-10-08T17:29:15.621Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/74/c2dd5daf069e3798d09d5746000f9b210de04df83834e5cb47f0ace51892/ormsgpack-1.10.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8a52c7ce7659459f3dc8dec9fd6a6c76f855a0a7e2b61f26090982ac10b95216", size = 376280, upload-time = "2025-05-24T19:06:51.3Z" }, - { url = "https://files.pythonhosted.org/packages/78/7b/30ff4bffb709e8a242005a8c4d65714fd96308ad640d31cff1b85c0d8cc4/ormsgpack-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:060f67fe927582f4f63a1260726d019204b72f460cf20930e6c925a1d129f373", size = 204335, upload-time = "2025-05-24T19:06:53.442Z" }, - { url = "https://files.pythonhosted.org/packages/8f/3f/c95b7d142819f801a0acdbd04280e8132e43b6e5a8920173e8eb92ea0e6a/ormsgpack-1.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7058ef6092f995561bf9f71d6c9a4da867b6cc69d2e94cb80184f579a3ceed5", size = 215373, upload-time = "2025-05-24T19:06:55.153Z" }, - { url = "https://files.pythonhosted.org/packages/ef/1a/e30f4bcf386db2015d1686d1da6110c95110294d8ea04f86091dd5eb3361/ormsgpack-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6f3509c1b0e51b15552d314b1d409321718122e90653122ce4b997f01453a", size = 216469, upload-time = "2025-05-24T19:06:56.555Z" }, - { url = "https://files.pythonhosted.org/packages/96/fc/7e44aeade22b91883586f45b7278c118fd210834c069774891447f444fc9/ormsgpack-1.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:51c1edafd5c72b863b1f875ec31c529f09c872a5ff6fe473b9dfaf188ccc3227", size = 384590, upload-time = "2025-05-24T19:06:58.286Z" }, - { url = "https://files.pythonhosted.org/packages/ec/78/f92c24e8446697caa83c122f10b6cf5e155eddf81ce63905c8223a260482/ormsgpack-1.10.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c780b44107a547a9e9327270f802fa4d6b0f6667c9c03c3338c0ce812259a0f7", size = 478891, upload-time = "2025-05-24T19:07:00.126Z" }, - { url = "https://files.pythonhosted.org/packages/5a/75/87449690253c64bea2b663c7c8f2dbc9ad39d73d0b38db74bdb0f3947b16/ormsgpack-1.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:137aab0d5cdb6df702da950a80405eb2b7038509585e32b4e16289604ac7cb84", size = 390121, upload-time = "2025-05-24T19:07:01.777Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/cc/c83257faf3a5169ec29dd87121317a25711da9412ee8c1e82f2e1a00c0be/ormsgpack-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:3e666cb63030538fa5cd74b1e40cb55b6fdb6e2981f024997a288bf138ebad07", size = 121196, upload-time = "2025-05-24T19:07:03.47Z" }, - { url = "https://files.pythonhosted.org/packages/30/27/7da748bc0d7d567950a378dee5a32477ed5d15462ab186918b5f25cac1ad/ormsgpack-1.10.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4bb7df307e17b36cbf7959cd642c47a7f2046ae19408c564e437f0ec323a7775", size = 376275, upload-time = "2025-05-24T19:07:05.128Z" }, - { url = "https://files.pythonhosted.org/packages/7b/65/c082cc8c74a914dbd05af0341c761c73c3d9960b7432bbf9b8e1e20811af/ormsgpack-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8817ae439c671779e1127ee62f0ac67afdeaeeacb5f0db45703168aa74a2e4af", size = 204335, upload-time = "2025-05-24T19:07:06.423Z" }, - { url = "https://files.pythonhosted.org/packages/46/62/17ef7e5d9766c79355b9c594cc9328c204f1677bc35da0595cc4e46449f0/ormsgpack-1.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f345f81e852035d80232e64374d3a104139d60f8f43c6c5eade35c4bac5590e", size = 215372, upload-time = "2025-05-24T19:07:08.149Z" }, - { url = "https://files.pythonhosted.org/packages/4e/92/7c91e8115fc37e88d1a35e13200fda3054ff5d2e5adf017345e58cea4834/ormsgpack-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21de648a1c7ef692bdd287fb08f047bd5371d7462504c0a7ae1553c39fee35e3", size = 216470, upload-time = "2025-05-24T19:07:09.903Z" }, - { url = "https://files.pythonhosted.org/packages/2c/86/ce053c52e2517b90e390792d83e926a7a523c1bce5cc63d0a7cd05ce6cf6/ormsgpack-1.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3a7d844ae9cbf2112c16086dd931b2acefce14cefd163c57db161170c2bfa22b", size = 384591, upload-time = "2025-05-24T19:07:11.24Z" }, - { url = "https://files.pythonhosted.org/packages/07/e8/2ad59f2ab222c6029e500bc966bfd2fe5cb099f8ab6b7ebeb50ddb1a6fe5/ormsgpack-1.10.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e4d80585403d86d7f800cf3d0aafac1189b403941e84e90dd5102bb2b92bf9d5", size = 478892, upload-time = "2025-05-24T19:07:13.147Z" }, - { url = "https://files.pythonhosted.org/packages/f4/73/f55e4b47b7b18fd8e7789680051bf830f1e39c03f1d9ed993cd0c3e97215/ormsgpack-1.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da1de515a87e339e78a3ccf60e39f5fb740edac3e9e82d3c3d209e217a13ac08", size = 390122, upload-time = "2025-05-24T19:07:14.557Z" }, - { url = "https://files.pythonhosted.org/packages/f7/87/073251cdb93d4c6241748568b3ad1b2a76281fb2002eed16a3a4043d61cf/ormsgpack-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:57c4601812684024132cbb32c17a7d4bb46ffc7daf2fddf5b697391c2c4f142a", size = 121197, upload-time = "2025-05-24T19:07:15.981Z" }, - { url = "https://files.pythonhosted.org/packages/99/95/f3ab1a7638f6aa9362e87916bb96087fbbc5909db57e19f12ad127560e1e/ormsgpack-1.10.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4e159d50cd4064d7540e2bc6a0ab66eab70b0cc40c618b485324ee17037527c0", size = 376806, upload-time = "2025-05-24T19:07:17.221Z" }, - { url = "https://files.pythonhosted.org/packages/6c/2b/42f559f13c0b0f647b09d749682851d47c1a7e48308c43612ae6833499c8/ormsgpack-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb47c85f3a866e29279d801115b554af0fefc409e2ed8aa90aabfa77efe5cc6", size = 
204433, upload-time = "2025-05-24T19:07:18.569Z" }, - { url = "https://files.pythonhosted.org/packages/45/42/1ca0cb4d8c80340a89a4af9e6d8951fb8ba0d076a899d2084eadf536f677/ormsgpack-1.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c28249574934534c9bd5dce5485c52f21bcea0ee44d13ece3def6e3d2c3798b5", size = 215547, upload-time = "2025-05-24T19:07:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/0a/38/184a570d7c44c0260bc576d1daaac35b2bfd465a50a08189518505748b9a/ormsgpack-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1957dcadbb16e6a981cd3f9caef9faf4c2df1125e2a1b702ee8236a55837ce07", size = 216746, upload-time = "2025-05-24T19:07:21.83Z" }, - { url = "https://files.pythonhosted.org/packages/69/2f/1aaffd08f6b7fdc2a57336a80bdfb8df24e6a65ada5aa769afecfcbc6cc6/ormsgpack-1.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3b29412558c740bf6bac156727aa85ac67f9952cd6f071318f29ee72e1a76044", size = 384783, upload-time = "2025-05-24T19:07:23.674Z" }, - { url = "https://files.pythonhosted.org/packages/a9/63/3e53d6f43bb35e00c98f2b8ab2006d5138089ad254bc405614fbf0213502/ormsgpack-1.10.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6933f350c2041ec189fe739f0ba7d6117c8772f5bc81f45b97697a84d03020dd", size = 479076, upload-time = "2025-05-24T19:07:25.047Z" }, - { url = "https://files.pythonhosted.org/packages/b8/19/fa1121b03b61402bb4d04e35d164e2320ef73dfb001b57748110319dd014/ormsgpack-1.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a86de06d368fcc2e58b79dece527dc8ca831e0e8b9cec5d6e633d2777ec93d0", size = 390447, upload-time = "2025-05-24T19:07:26.568Z" }, - { url = "https://files.pythonhosted.org/packages/b0/0d/73143ecb94ac4a5dcba223402139240a75dee0cc6ba8a543788a5646407a/ormsgpack-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:35fa9f81e5b9a0dab42e09a73f7339ecffdb978d6dbf9deb2ecf1e9fc7808722", size = 121401, upload-time = "2025-05-24T19:07:28.308Z" }, - { url = "https://files.pythonhosted.org/packages/61/f8/ec5f4e03268d0097545efaab2893aa63f171cf2959cb0ea678a5690e16a1/ormsgpack-1.10.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d816d45175a878993b7372bd5408e0f3ec5a40f48e2d5b9d8f1cc5d31b61f1f", size = 376806, upload-time = "2025-05-24T19:07:29.555Z" }, - { url = "https://files.pythonhosted.org/packages/c1/19/b3c53284aad1e90d4d7ed8c881a373d218e16675b8b38e3569d5b40cc9b8/ormsgpack-1.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90345ccb058de0f35262893751c603b6376b05f02be2b6f6b7e05d9dd6d5643", size = 204433, upload-time = "2025-05-24T19:07:30.977Z" }, - { url = "https://files.pythonhosted.org/packages/09/0b/845c258f59df974a20a536c06cace593698491defdd3d026a8a5f9b6e745/ormsgpack-1.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144b5e88f1999433e54db9d637bae6fe21e935888be4e3ac3daecd8260bd454e", size = 215549, upload-time = "2025-05-24T19:07:32.345Z" }, - { url = "https://files.pythonhosted.org/packages/61/56/57fce8fb34ca6c9543c026ebebf08344c64dbb7b6643d6ddd5355d37e724/ormsgpack-1.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2190b352509d012915921cca76267db136cd026ddee42f1b0d9624613cc7058c", size = 216747, upload-time = "2025-05-24T19:07:34.075Z" }, - { url = "https://files.pythonhosted.org/packages/b8/3f/655b5f6a2475c8d209f5348cfbaaf73ce26237b92d79ef2ad439407dd0fa/ormsgpack-1.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:86fd9c1737eaba43d3bb2730add9c9e8b5fbed85282433705dd1b1e88ea7e6fb", size = 384785, upload-time = "2025-05-24T19:07:35.83Z" }, - { url = "https://files.pythonhosted.org/packages/4b/94/687a0ad8afd17e4bce1892145d6a1111e58987ddb176810d02a1f3f18686/ormsgpack-1.10.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:33afe143a7b61ad21bb60109a86bb4e87fec70ef35db76b89c65b17e32da7935", size = 479076, upload-time = "2025-05-24T19:07:37.533Z" }, - { url = "https://files.pythonhosted.org/packages/c8/34/68925232e81e0e062a2f0ac678f62aa3b6f7009d6a759e19324dbbaebae7/ormsgpack-1.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f23d45080846a7b90feabec0d330a9cc1863dc956728412e4f7986c80ab3a668", size = 390446, upload-time = "2025-05-24T19:07:39.469Z" }, - { url = "https://files.pythonhosted.org/packages/12/ad/f4e1a36a6d1714afb7ffb74b3ababdcb96529cf4e7a216f9f7c8eda837b6/ormsgpack-1.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:534d18acb805c75e5fba09598bf40abe1851c853247e61dda0c01f772234da69", size = 121399, upload-time = "2025-05-24T19:07:40.854Z" }, + { url = "https://files.pythonhosted.org/packages/ff/3d/6996193cb2babc47fc92456223bef7d141065357ad4204eccf313f47a7b3/ormsgpack-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:03d4e658dd6e1882a552ce1d13cc7b49157414e7d56a4091fbe7823225b08cba", size = 367965, upload-time = "2025-10-08T17:28:06.736Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/c83b805dd9caebb046f4ceeed3706d0902ed2dbbcf08b8464e89f2c52e05/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67eb913c2b703f0ed39607fc56e50724dd41f92ce080a586b4d6149eb3fe4", size = 195209, upload-time = "2025-10-08T17:28:08.395Z" }, + { url = "https://files.pythonhosted.org/packages/3a/17/427d9c4f77b120f0af01d7a71d8144771c9388c2a81f712048320e31353b/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e54175b92411f73a238e5653a998627f6660de3def37d9dd7213e0fd264ca56", size = 205868, upload-time = "2025-10-08T17:28:09.688Z" }, + { url = "https://files.pythonhosted.org/packages/82/32/a9ce218478bdbf3fee954159900e24b314ab3064f7b6a217ccb1e3464324/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca2b197f4556e1823d1319869d4c5dc278be335286d2308b0ed88b59a5afcc25", size = 207391, upload-time = "2025-10-08T17:28:11.031Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d3/4413fe7454711596fdf08adabdfa686580e4656702015108e4975f00a022/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc62388262f58c792fe1e450e1d9dbcc174ed2fb0b43db1675dd7c5ff2319d6a", size = 377078, upload-time = "2025-10-08T17:28:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ad/13fae555a45e35ca1ca929a27c9ee0a3ecada931b9d44454658c543f9b9c/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c48bc10af74adfbc9113f3fb160dc07c61ad9239ef264c17e449eba3de343dc2", size = 470776, upload-time = "2025-10-08T17:28:13.484Z" }, + { url = "https://files.pythonhosted.org/packages/36/60/51178b093ffc4e2ef3381013a67223e7d56224434fba80047249f4a84b26/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a608d3a1d4fa4acdc5082168a54513cff91f47764cef435e81a483452f5f7647", size = 380862, upload-time = "2025-10-08T17:28:14.747Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e3/1cb6c161335e2ae7d711ecfb007a31a3936603626e347c13e5e53b7c7cf8/ormsgpack-1.11.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:97217b4f7f599ba45916b9c4c4b1d5656e8e2a4d91e2e191d72a7569d3c30923", size = 112058, upload-time = "2025-10-08T17:28:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/90164d00e8e94b48eff8a17bc2f4be6b71ae356a00904bc69d5e8afe80fb/ormsgpack-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c7be823f47d8e36648d4bc90634b93f02b7d7cc7480081195f34767e86f181fb", size = 367964, upload-time = "2025-10-08T17:28:16.778Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c2/fb6331e880a3446c1341e72c77bd5a46da3e92a8e2edf7ea84a4c6c14fff/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68accf15d1b013812755c0eb7a30e1fc2f81eb603a1a143bf0cda1b301cfa797", size = 195209, upload-time = "2025-10-08T17:28:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/50/4943fb5df8cc02da6b7b1ee2c2a7fb13aebc9f963d69280b1bb02b1fb178/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:805d06fb277d9a4e503c0c707545b49cde66cbb2f84e5cf7c58d81dfc20d8658", size = 205869, upload-time = "2025-10-08T17:28:19.01Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/e7e06835bfea9adeef43915143ce818098aecab0cbd3df584815adf3e399/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1e57cdf003e77acc43643bda151dc01f97147a64b11cdee1380bb9698a7601c", size = 207391, upload-time = "2025-10-08T17:28:20.352Z" }, + { url = "https://files.pythonhosted.org/packages/33/f0/f28a19e938a14ec223396e94f4782fbcc023f8c91f2ab6881839d3550f32/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:37fc05bdaabd994097c62e2f3e08f66b03f856a640ede6dc5ea340bd15b77f4d", size = 377081, upload-time = "2025-10-08T17:28:21.926Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e3/73d1d7287637401b0b6637e30ba9121e1aa1d9f5ea185ed9834ca15d512c/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a6e9db6c73eb46b2e4d97bdffd1368a66f54e6806b563a997b19c004ef165e1d", size = 470779, upload-time = "2025-10-08T17:28:22.993Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/7ba7f9721e766dd0dfe4cedf444439447212abffe2d2f4538edeeec8ccbd/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9c44eae5ac0196ffc8b5ed497c75511056508f2303fa4d36b208eb820cf209e", size = 380865, upload-time = "2025-10-08T17:28:24.012Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7d/bb92a0782bbe0626c072c0320001410cf3f6743ede7dc18f034b1a18edef/ormsgpack-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:11d0dfaf40ae7c6de4f7dbd1e4892e2e6a55d911ab1774357c481158d17371e4", size = 112058, upload-time = "2025-10-08T17:28:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/28/1a/f07c6f74142815d67e1d9d98c5b2960007100408ade8242edac96d5d1c73/ormsgpack-1.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:0c63a3f7199a3099c90398a1bdf0cb577b06651a442dc5efe67f2882665e5b02", size = 105894, upload-time = "2025-10-08T17:28:25.93Z" }, + { url = "https://files.pythonhosted.org/packages/1e/16/2805ebfb3d2cbb6c661b5fae053960fc90a2611d0d93e2207e753e836117/ormsgpack-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3434d0c8d67de27d9010222de07fb6810fb9af3bb7372354ffa19257ac0eb83b", size = 368474, upload-time = "2025-10-08T17:28:27.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/39/6afae47822dca0ce4465d894c0bbb860a850ce29c157882dbdf77a5dd26e/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2da5bd097e8dbfa4eb0d4ccfe79acd6f538dee4493579e2debfe4fc8f4ca89b", size = 195321, upload-time = "2025-10-08T17:28:28.573Z" }, + { url = "https://files.pythonhosted.org/packages/f6/54/11eda6b59f696d2f16de469bfbe539c9f469c4b9eef5a513996b5879c6e9/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fdbaa0a5a8606a486960b60c24f2d5235d30ac7a8b98eeaea9854bffef14dc3d", size = 206036, upload-time = "2025-10-08T17:28:29.785Z" }, + { url = "https://files.pythonhosted.org/packages/1e/86/890430f704f84c4699ddad61c595d171ea2fd77a51fbc106f83981e83939/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3682f24f800c1837017ee90ce321086b2cbaef88db7d4cdbbda1582aa6508159", size = 207615, upload-time = "2025-10-08T17:28:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b9/77383e16c991c0ecb772205b966fc68d9c519e0b5f9c3913283cbed30ffe/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fcca21202bb05ccbf3e0e92f560ee59b9331182e4c09c965a28155efbb134993", size = 377195, upload-time = "2025-10-08T17:28:32.436Z" }, + { url = "https://files.pythonhosted.org/packages/20/e2/15f9f045d4947f3c8a5e0535259fddf027b17b1215367488b3565c573b9d/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c30e5c4655ba46152d722ec7468e8302195e6db362ec1ae2c206bc64f6030e43", size = 470960, upload-time = "2025-10-08T17:28:33.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/403ce188c4c495bc99dff921a0ad3d9d352dd6d3c4b629f3638b7f0cf79b/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7138a341f9e2c08c59368f03d3be25e8b87b3baaf10d30fb1f6f6b52f3d47944", size = 381174, upload-time = "2025-10-08T17:28:34.781Z" }, + { url = "https://files.pythonhosted.org/packages/14/a8/94c94bc48c68da4374870a851eea03fc5a45eb041182ad4c5ed9acfc05a4/ormsgpack-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4bd8589b78a11026d47f4edf13c1ceab9088bb12451f34396afe6497db28a27", size = 112314, upload-time = "2025-10-08T17:28:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/19/d0/aa4cf04f04e4cc180ce7a8d8ddb5a7f3af883329cbc59645d94d3ba157a5/ormsgpack-1.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:e5e746a1223e70f111d4001dab9585ac8639eee8979ca0c8db37f646bf2961da", size = 106072, upload-time = "2025-10-08T17:28:37.518Z" }, + { url = "https://files.pythonhosted.org/packages/8b/35/e34722edb701d053cf2240f55974f17b7dbfd11fdef72bd2f1835bcebf26/ormsgpack-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e7b36ab7b45cb95217ae1f05f1318b14a3e5ef73cb00804c0f06233f81a14e8", size = 368502, upload-time = "2025-10-08T17:28:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6a/c2fc369a79d6aba2aa28c8763856c95337ac7fcc0b2742185cd19397212a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43402d67e03a9a35cc147c8c03f0c377cad016624479e1ee5b879b8425551484", size = 195344, upload-time = "2025-10-08T17:28:39.554Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6a/0f8e24b7489885534c1a93bdba7c7c434b9b8638713a68098867db9f254c/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:64fd992f932764d6306b70ddc755c1bc3405c4c6a69f77a36acf7af1c8f5ada4", size = 206045, 
upload-time = "2025-10-08T17:28:40.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/71/8b460ba264f3c6f82ef5b1920335720094e2bd943057964ce5287d6df83a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0362fb7fe4a29c046c8ea799303079a09372653a1ce5a5a588f3bbb8088368d0", size = 207641, upload-time = "2025-10-08T17:28:41.736Z" }, + { url = "https://files.pythonhosted.org/packages/50/cf/f369446abaf65972424ed2651f2df2b7b5c3b735c93fc7fa6cfb81e34419/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:de2f7a65a9d178ed57be49eba3d0fc9b833c32beaa19dbd4ba56014d3c20b152", size = 377211, upload-time = "2025-10-08T17:28:43.12Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3f/948bb0047ce0f37c2efc3b9bb2bcfdccc61c63e0b9ce8088d4903ba39dcf/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f38cfae95461466055af966fc922d06db4e1654966385cda2828653096db34da", size = 470973, upload-time = "2025-10-08T17:28:44.465Z" }, + { url = "https://files.pythonhosted.org/packages/31/a4/92a8114d1d017c14aaa403445060f345df9130ca532d538094f38e535988/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c88396189d238f183cea7831b07a305ab5c90d6d29b53288ae11200bd956357b", size = 381161, upload-time = "2025-10-08T17:28:46.063Z" }, + { url = "https://files.pythonhosted.org/packages/d0/64/5b76447da654798bfcfdfd64ea29447ff2b7f33fe19d0e911a83ad5107fc/ormsgpack-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:5403d1a945dd7c81044cebeca3f00a28a0f4248b33242a5d2d82111628043725", size = 112321, upload-time = "2025-10-08T17:28:47.393Z" }, + { url = "https://files.pythonhosted.org/packages/46/5e/89900d06db9ab81e7ec1fd56a07c62dfbdcda398c435718f4252e1dc52a0/ormsgpack-1.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c57357b8d43b49722b876edf317bdad9e6d52071b523fdd7394c30cd1c67d5a0", size = 106084, upload-time = "2025-10-08T17:28:48.305Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0b/c659e8657085c8c13f6a0224789f422620cef506e26573b5434defe68483/ormsgpack-1.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d390907d90fd0c908211592c485054d7a80990697ef4dff4e436ac18e1aab98a", size = 368497, upload-time = "2025-10-08T17:28:49.297Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0e/451e5848c7ed56bd287e8a2b5cb5926e54466f60936e05aec6cb299f9143/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6153c2e92e789509098e04c9aa116b16673bd88ec78fbe0031deeb34ab642d10", size = 195385, upload-time = "2025-10-08T17:28:50.314Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/90f78cbbe494959f2439c2ec571f08cd3464c05a6a380b0d621c622122a9/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2b2c2a065a94d742212b2018e1fecd8f8d72f3c50b53a97d1f407418093446d", size = 206114, upload-time = "2025-10-08T17:28:51.336Z" }, + { url = "https://files.pythonhosted.org/packages/fb/db/34163f4c0923bea32dafe42cd878dcc66795a3e85669bc4b01c1e2b92a7b/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110e65b5340f3d7ef8b0009deae3c6b169437e6b43ad5a57fd1748085d29d2ac", size = 207679, upload-time = "2025-10-08T17:28:53.627Z" }, + { url = "https://files.pythonhosted.org/packages/b6/14/04ee741249b16f380a9b4a0cc19d4134d0b7c74bab27a2117da09e525eb9/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c27e186fca96ab34662723e65b420919910acbbc50fc8e1a44e08f26268cb0e0", size = 377237, upload-time = "2025-10-08T17:28:56.12Z" }, + { url = "https://files.pythonhosted.org/packages/89/ff/53e588a6aaa833237471caec679582c2950f0e7e1a8ba28c1511b465c1f4/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d56b1f877c13d499052d37a3db2378a97d5e1588d264f5040b3412aee23d742c", size = 471021, upload-time = "2025-10-08T17:28:57.299Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f9/f20a6d9ef2be04da3aad05e8f5699957e9a30c6d5c043a10a296afa7e890/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c88e28cd567c0a3269f624b4ade28142d5e502c8e826115093c572007af5be0a", size = 381205, upload-time = "2025-10-08T17:28:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/64/96c07d084b479ac8b7821a77ffc8d3f29d8b5c95ebfdf8db1c03dff02762/ormsgpack-1.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:8811160573dc0a65f62f7e0792c4ca6b7108dfa50771edb93f9b84e2d45a08ae", size = 112374, upload-time = "2025-10-08T17:29:00Z" }, + { url = "https://files.pythonhosted.org/packages/88/a5/5dcc18b818d50213a3cadfe336bb6163a102677d9ce87f3d2f1a1bee0f8c/ormsgpack-1.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:23e30a8d3c17484cf74e75e6134322255bd08bc2b5b295cc9c442f4bae5f3c2d", size = 106056, upload-time = "2025-10-08T17:29:01.29Z" }, + { url = "https://files.pythonhosted.org/packages/19/2b/776d1b411d2be50f77a6e6e94a25825cca55dcacfe7415fd691a144db71b/ormsgpack-1.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2905816502adfaf8386a01dd85f936cd378d243f4f5ee2ff46f67f6298dc90d5", size = 368661, upload-time = "2025-10-08T17:29:02.382Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0c/81a19e6115b15764db3d241788f9fac093122878aaabf872cc545b0c4650/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c04402fb9a0a9b9f18fbafd6d5f8398ee99b3ec619fb63952d3a954bc9d47daa", size = 195539, upload-time = "2025-10-08T17:29:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/97/86/e5b50247a61caec5718122feb2719ea9d451d30ac0516c288c1dbc6408e8/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a025ec07ac52056ecfd9e57b5cbc6fff163f62cb9805012b56cda599157f8ef2", size = 207718, upload-time = "2025-10-08T17:29:04.545Z" }, ] [[package]] @@ -3391,6 +3747,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + [[package]] name = "pytest" version = 
"8.4.2" @@ -3998,38 +4368,63 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.11.0" +version = "0.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = "sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, - { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, - { url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, - { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, - { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, - { url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, - { url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, - { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, - { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, - { url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, - { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, - { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, - { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, - { url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, + { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, + { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = 
"2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, ] [[package]] @@ -4148,19 +4543,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/39/82c9d3e10979851847361d922a373bdfef4091020da7f893acfaf07c0225/typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb", size = 45973, upload-time = "2024-03-23T17:07:53.985Z" }, ] -[[package]] -name = "typer-slim" -version = "0.19.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/75/d6/489402eda270c00555213bdd53061b23a0ae2b5dccbfe428ebcc9562d883/typer_slim-0.19.2.tar.gz", hash = 
"sha256:6f601e28fb8249a7507f253e35fb22ccc701403ce99bea6a9923909ddbfcd133", size = 104788, upload-time = "2025-09-23T09:47:42.917Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/19/7aef771b3293e1b7c749eebb2948bb7ccd0e9b56aa222eb4d5e015087730/typer_slim-0.19.2-py3-none-any.whl", hash = "sha256:1c9cdbbcd5b8d30f4118d3cb7c52dc63438b751903fbd980a35df1dfe10c6c91", size = 46806, upload-time = "2025-09-23T09:47:41.385Z" }, -] - [[package]] name = "types-requests" version = "2.31.0.6" @@ -4647,6 +5029,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, ] +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] + [[package]] name = "zstandard" version = "0.25.0" diff --git a/libs/model-profiles/Makefile b/libs/model-profiles/Makefile new file mode 100644 index 00000000000..0af0220d1e4 --- /dev/null +++ b/libs/model-profiles/Makefile @@ -0,0 +1,73 @@ +.PHONY: all format lint test tests integration_tests help extended_tests refresh_data + +# Default target executed when no arguments are given to make. +all: help + +.EXPORT_ALL_VARIABLES: +UV_FROZEN = true + +# Define a variable for the test file path. +TEST_FILE ?= tests/unit_tests/ + +integration_test integration_tests: TEST_FILE=tests/integration_tests/ + +# unit tests are run with the --disable-socket flag to prevent network calls +test tests: + uv run --group test pytest --disable-socket --allow-unix-socket $(TEST_FILE) + +integration_test integration_tests: + uv run --group test --group test_integration pytest -n auto $(TEST_FILE) + +test_watch: + uv run --group test ptw --snapshot-update --now . -- -vv $(TEST_FILE) + + +make benchmark: + uv run --group test pytest ./tests -m benchmark + + +###################### +# DATA MANAGEMENT +###################### + +refresh_data: + uv run --group dev python scripts/refresh_data.py + +###################### +# LINTING AND FORMATTING +###################### + +# Define a variable for Python and notebook files. +PYTHON_FILES=. +MYPY_CACHE=.mypy_cache +lint format: PYTHON_FILES=. 
+lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/model-profiles --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$') +lint_package: PYTHON_FILES=langchain_model_profiles +lint_tests: PYTHON_FILES=tests +lint_tests: MYPY_CACHE=.mypy_cache_test + +lint lint_diff lint_package lint_tests: + [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check $(PYTHON_FILES) + [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES) --diff + [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && uv run --all-groups mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + +format format_diff: + [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES) + [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check --fix $(PYTHON_FILES) + +check_imports: $(shell find langchain_model_profiles -name '*.py') + uv run --all-groups python ./scripts/check_imports.py $^ + +###################### +# HELP +###################### + +help: + @echo '----' + @echo 'check_imports - check imports' + @echo 'format - run code formatters' + @echo 'lint - run linters' + @echo 'refresh_data - download latest model data from models.dev' + @echo 'test - run unit tests' + @echo 'tests - run unit tests' + @echo 'test TEST_FILE= - run all tests in file' diff --git a/libs/model-profiles/README.md b/libs/model-profiles/README.md new file mode 100644 index 00000000000..8cd8da0d8a2 --- /dev/null +++ b/libs/model-profiles/README.md @@ -0,0 +1,66 @@ +# 🦜πŸͺͺ langchain-model-profiles + +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-model-profiles?label=%20)](https://pypi.org/project/langchain-model-profiles/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-model-profiles)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-model-profiles)](https://pypistats.org/packages/langchain-model-profiles) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +> [!WARNING] +> This package is currently in development and the API is subject to change. + +Centralized reference of LLM capabilities for LangChain chat models. + +## Overview + +`langchain-model-profiles` enables programmatic access to model capabilities through a `.profile` property on LangChain chat models. + +This allows you to query model-specific features such as context window sizes, supported input/output modalities, structured output support, tool calling capabilities, and more. + +## Data sources + +This package is built on top of the excellent work by the [models.dev](https://github.com/sst/models.dev) project, an open source initiative that provides model capability data. + +This package augments the data from models.dev with some additional fields. We intend to keep this aligned with the upstream project as it evolves. 
+ +## Installation + +```bash +pip install "langchain[model-profiles]" +``` + +Or with uv: + +```bash +uv add "langchain[model-profiles]" +``` + +## Usage + +Access model capabilities through the `.profile` property on any LangChain chat model: + +```python +# pip install "langchain[openai]" + +from langchain.chat_models import init_chat_model + +model = init_chat_model("openai:gpt-5") +profile = model.profile + +# Check specific capabilities +if profile.get("structured_output"): + print(f"This model supports a dedicated structured output feature.") + +if profile.get("max_input_tokens"): + print(f"Max input tokens: {profile.get('max_input_tokens')}") + +if profile.get("..."): + ... +``` + +## Available fields + +See `ModelProfile` in [`model_profile.py`](./langchain_model_profiles/model_profile.py) for the full list of available fields and their descriptions. + +## License + +MIT diff --git a/libs/model-profiles/extended_testing_deps.txt b/libs/model-profiles/extended_testing_deps.txt new file mode 100644 index 00000000000..b694e8136af --- /dev/null +++ b/libs/model-profiles/extended_testing_deps.txt @@ -0,0 +1,2 @@ +-e ../partners/openai +-e ../partners/anthropic diff --git a/libs/model-profiles/langchain_model_profiles/__init__.py b/libs/model-profiles/langchain_model_profiles/__init__.py new file mode 100644 index 00000000000..a8d06e38267 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/__init__.py @@ -0,0 +1,8 @@ +"""Model profiles entrypoint.""" + +from langchain_model_profiles.model_profile import ModelProfile, get_model_profile + +__all__ = [ + "ModelProfile", + "get_model_profile", +] diff --git a/libs/model-profiles/langchain_model_profiles/_data_loader.py b/libs/model-profiles/langchain_model_profiles/_data_loader.py new file mode 100644 index 00000000000..478314244ec --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/_data_loader.py @@ -0,0 +1,141 @@ +"""Data loader for model profiles with augmentation support.""" + +import json +import sys +from functools import cached_property +from pathlib import Path +from typing import Any + +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib + + +class _DataLoader: + """Loads and merges model profile data from base and augmentations. + + See the README in `data/augmentations` directory for more details on the + augmentation structure and merge priority. + """ + + def __init__(self) -> None: + """Initialize the loader.""" + self._data_dir = Path(__file__).parent / "data" + + @property + def _base_data_path(self) -> Path: + """Get path to base data file. + + `models.json` is the downloaded data from models.dev. + """ + return self._data_dir / "models.json" + + @property + def _augmentations_dir(self) -> Path: + """Get path to augmentations directory.""" + return self._data_dir / "augmentations" + + @cached_property + def _merged_data(self) -> dict[str, Any]: + """Load and merge all data once at startup. + + Merging order: + + 1. Base data from `models.json` + 2. Provider-level augmentations from `augmentations/providers/{provider}.toml` + 3. Model-level augmentations from `augmentations/models/{provider}/{model}.toml` + + Returns: + Fully merged provider data with all augmentations applied. 
+ """ + # Load base data; let exceptions propagate to user + with self._base_data_path.open("r") as f: + data = json.load(f) + + provider_augmentations = self._load_provider_augmentations() + model_augmentations = self._load_model_augmentations() + + # Merge contents + for provider_id, provider_data in data.items(): + models = provider_data.get("models", {}) + provider_aug = provider_augmentations.get(provider_id, {}) + + for model_id, model_data in models.items(): + if provider_aug: + model_data.update(provider_aug) + + # Apply model-level augmentations (highest priority) + model_aug = model_augmentations.get(provider_id, {}).get(model_id, {}) + if model_aug: + model_data.update(model_aug) + + return data + + def _load_provider_augmentations(self) -> dict[str, dict[str, Any]]: + """Load all provider-level augmentations. + + Returns: + `dict` mapping provider IDs to their augmentation data. + """ + augmentations: dict[str, dict[str, Any]] = {} + providers_dir = self._augmentations_dir / "providers" + + if not providers_dir.exists(): + return augmentations + + for toml_file in providers_dir.glob("*.toml"): + provider_id = toml_file.stem + with toml_file.open("rb") as f: + data = tomllib.load(f) + if "profile" in data: + augmentations[provider_id] = data["profile"] + + return augmentations + + def _load_model_augmentations(self) -> dict[str, dict[str, dict[str, Any]]]: + """Load all model-level augmentations. + + Returns: + Nested `dict`: `provider_id` -> `model_id` -> augmentation data. + """ + augmentations: dict[str, dict[str, dict[str, Any]]] = {} + models_dir = self._augmentations_dir / "models" + + if not models_dir.exists(): + return augmentations + + for provider_dir in models_dir.iterdir(): + if not provider_dir.is_dir(): + continue + + provider_id = provider_dir.name + augmentations[provider_id] = {} + + for toml_file in provider_dir.glob("*.toml"): + model_id = toml_file.stem + with toml_file.open("rb") as f: + data = tomllib.load(f) + if "profile" in data: + augmentations[provider_id][model_id] = data["profile"] + + return augmentations + + def get_profile_data( + self, provider_id: str, model_id: str + ) -> dict[str, Any] | None: + """Get merged profile data for a specific model. + + Args: + provider_id: The provider identifier. + model_id: The model identifier. + + Returns: + Merged model data `dict` or `None` if not found. + """ + provider = self._merged_data.get(provider_id) + if provider is None: + return None + + models = provider.get("models", {}) + return models.get(model_id) diff --git a/libs/model-profiles/langchain_model_profiles/data/ATTRIBUTION.txt b/libs/model-profiles/langchain_model_profiles/data/ATTRIBUTION.txt new file mode 100644 index 00000000000..a8a6ea7a580 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/ATTRIBUTION.txt @@ -0,0 +1,11 @@ +Data Attribution +================ + +The models.json file in this directory contains data derived from the models.dev project. + +Source: https://github.com/sst/models.dev +Copyright (c) 2025 models.dev contributors +License: MIT License + +This package augments the original models.dev data with additional fields +specific to LangChain integration. 
diff --git a/libs/model-profiles/langchain_model_profiles/data/augmentations/README.md b/libs/model-profiles/langchain_model_profiles/data/augmentations/README.md new file mode 100644 index 00000000000..a4e93683d72 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/augmentations/README.md @@ -0,0 +1,40 @@ +# Profile augmentations + +This directory contains LangChain-specific augmentations to the base model data from models.dev. + +## Structure + +```txt +augmentations/ +β”œβ”€β”€ providers/ # Provider-level augmentations (apply to all models from a provider) +β”‚ β”œβ”€β”€ anthropic.toml +β”‚ β”œβ”€β”€ openai.toml +β”‚ └── ... +└── models/ # Model-specific augmentations (override provider defaults) + β”œβ”€β”€ anthropic/ + β”‚ └── claude-sonnet-4-5-20250929.toml + └── openai/ + └── o1.toml +``` + +## Merge priority + +Data is merged in the following order (later overrides earlier): + +1. Base data from `models.json` (from models.dev API) +2. Provider-level augmentations from `augmentations/providers/{provider}.toml` +3. Model-level augmentations from `augmentations/models/{provider}/{model}.toml` + +## TOML format + +All augmentation files should use the following structure: + +```toml +[profile] +# Add or override model profile fields +image_url_inputs = true +pdf_inputs = true +tool_choice = true +``` + +Available fields match the `ModelProfile` TypedDict in `model_profile.py`. diff --git a/libs/model-profiles/langchain_model_profiles/data/augmentations/models/openai/gpt-3.5-turbo.toml b/libs/model-profiles/langchain_model_profiles/data/augmentations/models/openai/gpt-3.5-turbo.toml new file mode 100644 index 00000000000..0997583426c --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/augmentations/models/openai/gpt-3.5-turbo.toml @@ -0,0 +1,6 @@ +[profile] +image_url_inputs = false +pdf_inputs = false +pdf_tool_message = false +image_tool_message = false +structured_output = false diff --git a/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/amazon-bedrock.toml b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/amazon-bedrock.toml new file mode 100644 index 00000000000..658df1dafdb --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/amazon-bedrock.toml @@ -0,0 +1,6 @@ +[profile] +image_url_inputs = true +pdf_inputs = true +pdf_tool_message = true +image_tool_message = true +structured_output = false diff --git a/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/anthropic.toml b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/anthropic.toml new file mode 100644 index 00000000000..658df1dafdb --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/anthropic.toml @@ -0,0 +1,6 @@ +[profile] +image_url_inputs = true +pdf_inputs = true +pdf_tool_message = true +image_tool_message = true +structured_output = false diff --git a/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/google.toml b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/google.toml new file mode 100644 index 00000000000..3b652c5d000 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/google.toml @@ -0,0 +1,6 @@ +[profile] +image_url_inputs = true +pdf_inputs = true +image_tool_message = true +tool_choice = true +structured_output = true diff --git 
a/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/google_vertexai.toml b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/google_vertexai.toml new file mode 100644 index 00000000000..3b652c5d000 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/google_vertexai.toml @@ -0,0 +1,6 @@ +[profile] +image_url_inputs = true +pdf_inputs = true +image_tool_message = true +tool_choice = true +structured_output = true diff --git a/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/openai.toml b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/openai.toml new file mode 100644 index 00000000000..305eeefd013 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/augmentations/providers/openai.toml @@ -0,0 +1,7 @@ +[profile] +image_url_inputs = true +pdf_inputs = true +pdf_tool_message = true +image_tool_message = true +tool_choice = true +structured_output = true diff --git a/libs/model-profiles/langchain_model_profiles/data/models.json b/libs/model-profiles/langchain_model_profiles/data/models.json new file mode 100644 index 00000000000..3fed45989a2 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/data/models.json @@ -0,0 +1,14502 @@ +{ + "amazon-bedrock": { + "doc": "https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html", + "env": [ + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + "AWS_REGION" + ], + "id": "amazon-bedrock", + "models": { + "ai21.jamba-1-5-large-v1:0": { + "attachment": false, + "cost": { + "input": 2, + "output": 8 + }, + "id": "ai21.jamba-1-5-large-v1:0", + "knowledge": "2024-08", + "last_updated": "2024-08-15", + "limit": { + "context": 256000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Jamba 1.5 Large", + "open_weights": true, + "reasoning": false, + "release_date": "2024-08-15", + "temperature": true, + "tool_call": true + }, + "ai21.jamba-1-5-mini-v1:0": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.4 + }, + "id": "ai21.jamba-1-5-mini-v1:0", + "knowledge": "2024-08", + "last_updated": "2024-08-15", + "limit": { + "context": 256000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Jamba 1.5 Mini", + "open_weights": true, + "reasoning": false, + "release_date": "2024-08-15", + "temperature": true, + "tool_call": true + }, + "amazon.nova-lite-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.015, + "input": 0.06, + "output": 0.24 + }, + "id": "amazon.nova-lite-v1:0", + "knowledge": "2024-10", + "last_updated": "2024-12-03", + "limit": { + "context": 300000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "video" + ], + "output": [ + "text" + ] + }, + "name": "Nova Lite", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-03", + "temperature": true, + "tool_call": true + }, + "amazon.nova-micro-v1:0": { + "attachment": false, + "cost": { + "cache_read": 0.00875, + "input": 0.035, + "output": 0.14 + }, + "id": "amazon.nova-micro-v1:0", + "knowledge": "2024-10", + "last_updated": "2024-12-03", + "limit": { + "context": 128000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Nova Micro", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-03", + "temperature": true, + "tool_call": true + }, + 
"amazon.nova-premier-v1:0": { + "attachment": true, + "cost": { + "input": 2.5, + "output": 12.5 + }, + "id": "amazon.nova-premier-v1:0", + "knowledge": "2024-10", + "last_updated": "2024-12-03", + "limit": { + "context": 1000000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image", + "video" + ], + "output": [ + "text" + ] + }, + "name": "Nova Premier", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-03", + "temperature": true, + "tool_call": true + }, + "amazon.nova-pro-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.2, + "input": 0.8, + "output": 3.2 + }, + "id": "amazon.nova-pro-v1:0", + "knowledge": "2024-10", + "last_updated": "2024-12-03", + "limit": { + "context": 300000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "video" + ], + "output": [ + "text" + ] + }, + "name": "Nova Pro", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-03", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-3-5-haiku-20241022-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "cache_write": 1, + "input": 0.8, + "output": 4 + }, + "id": "anthropic.claude-3-5-haiku-20241022-v1:0", + "knowledge": "2024-07", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 3.5", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-3-5-sonnet-20240620-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "anthropic.claude-3-5-sonnet-20240620-v1:0", + "knowledge": "2024-04", + "last_updated": "2024-06-20", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.5", + "open_weights": false, + "reasoning": false, + "release_date": "2024-06-20", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-3-5-sonnet-20241022-v2:0": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "anthropic.claude-3-5-sonnet-20241022-v2:0", + "knowledge": "2024-04", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.5 v2", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-3-7-sonnet-20250219-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "anthropic.claude-3-7-sonnet-20250219-v1:0", + "knowledge": "2024-04", + "last_updated": "2025-02-19", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.7", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-19", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-3-haiku-20240307-v1:0": { + "attachment": true, + "cost": { + "input": 0.25, + "output": 1.25 + }, + "id": "anthropic.claude-3-haiku-20240307-v1:0", + "knowledge": "2024-02", + "last_updated": "2024-03-13", + "limit": { + 
"context": 200000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 3", + "open_weights": false, + "reasoning": false, + "release_date": "2024-03-13", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-3-opus-20240229-v1:0": { + "attachment": true, + "cost": { + "input": 15, + "output": 75 + }, + "id": "anthropic.claude-3-opus-20240229-v1:0", + "knowledge": "2023-08", + "last_updated": "2024-02-29", + "limit": { + "context": 200000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 3", + "open_weights": false, + "reasoning": false, + "release_date": "2024-02-29", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-3-sonnet-20240229-v1:0": { + "attachment": true, + "cost": { + "input": 3, + "output": 15 + }, + "id": "anthropic.claude-3-sonnet-20240229-v1:0", + "knowledge": "2023-08", + "last_updated": "2024-03-04", + "limit": { + "context": 200000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3", + "open_weights": false, + "reasoning": false, + "release_date": "2024-03-04", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-haiku-4-5-20251001-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "cache_write": 1.25, + "input": 1, + "output": 5 + }, + "id": "anthropic.claude-haiku-4-5-20251001-v1:0", + "knowledge": "2025-02-28", + "last_updated": "2025-10-15", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-15", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-instant-v1": { + "attachment": false, + "cost": { + "input": 0.8, + "output": 2.4 + }, + "id": "anthropic.claude-instant-v1", + "knowledge": "2023-08", + "last_updated": "2023-03-01", + "limit": { + "context": 100000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Claude Instant", + "open_weights": false, + "reasoning": false, + "release_date": "2023-03-01", + "temperature": true, + "tool_call": false + }, + "anthropic.claude-opus-4-1-20250805-v1:0": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "anthropic.claude-opus-4-1-20250805-v1:0", + "knowledge": "2025-03-31", + "last_updated": "2025-08-05", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4.1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-opus-4-20250514-v1:0": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "anthropic.claude-opus-4-20250514-v1:0", + "knowledge": "2024-04", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + 
"anthropic.claude-sonnet-4-20250514-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "anthropic.claude-sonnet-4-20250514-v1:0", + "knowledge": "2024-04", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-sonnet-4-5-20250929-v1:0": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "anthropic.claude-sonnet-4-5-20250929-v1:0", + "knowledge": "2025-07-31", + "last_updated": "2025-09-29", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-29", + "temperature": true, + "tool_call": true + }, + "anthropic.claude-v2": { + "attachment": false, + "cost": { + "input": 8, + "output": 24 + }, + "id": "anthropic.claude-v2", + "knowledge": "2023-08", + "last_updated": "2023-07-11", + "limit": { + "context": 100000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Claude 2", + "open_weights": false, + "reasoning": false, + "release_date": "2023-07-11", + "temperature": true, + "tool_call": false + }, + "anthropic.claude-v2:1": { + "attachment": false, + "cost": { + "input": 8, + "output": 24 + }, + "id": "anthropic.claude-v2:1", + "knowledge": "2023-08", + "last_updated": "2023-11-21", + "limit": { + "context": 200000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Claude 2.1", + "open_weights": false, + "reasoning": false, + "release_date": "2023-11-21", + "temperature": true, + "tool_call": false + }, + "cohere.command-light-text-v14": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 0.6 + }, + "id": "cohere.command-light-text-v14", + "knowledge": "2023-08", + "last_updated": "2023-11-01", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Command Light", + "open_weights": true, + "reasoning": false, + "release_date": "2023-11-01", + "temperature": true, + "tool_call": false + }, + "cohere.command-r-plus-v1:0": { + "attachment": false, + "cost": { + "input": 3, + "output": 15 + }, + "id": "cohere.command-r-plus-v1:0", + "knowledge": "2024-04", + "last_updated": "2024-04-04", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Command R+", + "open_weights": true, + "reasoning": false, + "release_date": "2024-04-04", + "temperature": true, + "tool_call": true + }, + "cohere.command-r-v1:0": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 1.5 + }, + "id": "cohere.command-r-v1:0", + "knowledge": "2024-04", + "last_updated": "2024-03-11", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Command R", + "open_weights": true, + "reasoning": false, + "release_date": "2024-03-11", + "temperature": true, + "tool_call": true + }, + 
"cohere.command-text-v14": { + "attachment": false, + "cost": { + "input": 1.5, + "output": 2 + }, + "id": "cohere.command-text-v14", + "knowledge": "2023-08", + "last_updated": "2023-11-01", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Command", + "open_weights": true, + "reasoning": false, + "release_date": "2023-11-01", + "temperature": true, + "tool_call": false + }, + "deepseek.r1-v1:0": { + "attachment": false, + "cost": { + "input": 1.35, + "output": 5.4 + }, + "id": "deepseek.r1-v1:0", + "knowledge": "2024-07", + "last_updated": "2025-05-29", + "limit": { + "context": 128000, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek-R1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-01-20", + "temperature": true, + "tool_call": true + }, + "deepseek.v3-v1:0": { + "attachment": false, + "cost": { + "input": 0.58, + "output": 1.68 + }, + "id": "deepseek.v3-v1:0", + "knowledge": "2024-07", + "last_updated": "2025-09-18", + "limit": { + "context": 163840, + "output": 81920 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek-V3.1", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-18", + "temperature": true, + "tool_call": true + }, + "meta.llama3-1-70b-instruct-v1:0": { + "attachment": false, + "cost": { + "input": 0.72, + "output": 0.72 + }, + "id": "meta.llama3-1-70b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-07-23", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.1 70B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-23", + "temperature": true, + "tool_call": true + }, + "meta.llama3-1-8b-instruct-v1:0": { + "attachment": false, + "cost": { + "input": 0.22, + "output": 0.22 + }, + "id": "meta.llama3-1-8b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-07-23", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.1 8B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-23", + "temperature": true, + "tool_call": true + }, + "meta.llama3-2-11b-instruct-v1:0": { + "attachment": true, + "cost": { + "input": 0.16, + "output": 0.16 + }, + "id": "meta.llama3-2-11b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-09-25", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.2 11B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-25", + "temperature": true, + "tool_call": true + }, + "meta.llama3-2-1b-instruct-v1:0": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0.1 + }, + "id": "meta.llama3-2-1b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-09-25", + "limit": { + "context": 131000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.2 1B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-25", + "temperature": true, + "tool_call": true + }, + "meta.llama3-2-3b-instruct-v1:0": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.15 
+ }, + "id": "meta.llama3-2-3b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-09-25", + "limit": { + "context": 131000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.2 3B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-25", + "temperature": true, + "tool_call": true + }, + "meta.llama3-2-90b-instruct-v1:0": { + "attachment": true, + "cost": { + "input": 0.72, + "output": 0.72 + }, + "id": "meta.llama3-2-90b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-09-25", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.2 90B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-25", + "temperature": true, + "tool_call": true + }, + "meta.llama3-3-70b-instruct-v1:0": { + "attachment": false, + "cost": { + "input": 0.72, + "output": 0.72 + }, + "id": "meta.llama3-3-70b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-12-06", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.3 70B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-12-06", + "temperature": true, + "tool_call": true + }, + "meta.llama3-70b-instruct-v1:0": { + "attachment": false, + "cost": { + "input": 2.65, + "output": 3.5 + }, + "id": "meta.llama3-70b-instruct-v1:0", + "knowledge": "2023-12", + "last_updated": "2024-07-23", + "limit": { + "context": 8192, + "output": 2048 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3 70B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-23", + "temperature": true, + "tool_call": false + }, + "meta.llama3-8b-instruct-v1:0": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 0.6 + }, + "id": "meta.llama3-8b-instruct-v1:0", + "knowledge": "2023-03", + "last_updated": "2024-07-23", + "limit": { + "context": 8192, + "output": 2048 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3 8B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-23", + "temperature": true, + "tool_call": false + }, + "meta.llama4-maverick-17b-instruct-v1:0": { + "attachment": true, + "cost": { + "input": 0.24, + "output": 0.97 + }, + "id": "meta.llama4-maverick-17b-instruct-v1:0", + "knowledge": "2024-08", + "last_updated": "2025-04-05", + "limit": { + "context": 1000000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 4 Maverick 17B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-05", + "temperature": true, + "tool_call": true + }, + "meta.llama4-scout-17b-instruct-v1:0": { + "attachment": true, + "cost": { + "input": 0.17, + "output": 0.66 + }, + "id": "meta.llama4-scout-17b-instruct-v1:0", + "knowledge": "2024-08", + "last_updated": "2025-04-05", + "limit": { + "context": 3500000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 4 Scout 17B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-05", + "temperature": true, + "tool_call": true + }, + "qwen.qwen3-235b-a22b-2507-v1:0": { + "attachment": 
false, + "cost": { + "input": 0.22, + "output": 0.88 + }, + "id": "qwen.qwen3-235b-a22b-2507-v1:0", + "knowledge": "2024-04", + "last_updated": "2025-09-18", + "limit": { + "context": 262144, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 235B A22B 2507", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-18", + "temperature": true, + "tool_call": true + }, + "qwen.qwen3-32b-v1:0": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.6 + }, + "id": "qwen.qwen3-32b-v1:0", + "knowledge": "2024-04", + "last_updated": "2025-09-18", + "limit": { + "context": 16384, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 32B (dense)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-18", + "temperature": true, + "tool_call": true + }, + "qwen.qwen3-coder-30b-a3b-v1:0": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.6 + }, + "id": "qwen.qwen3-coder-30b-a3b-v1:0", + "knowledge": "2024-04", + "last_updated": "2025-09-18", + "limit": { + "context": 262144, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 30B A3B Instruct", + "open_weights": false, + "reasoning": false, + "release_date": "2025-09-18", + "temperature": true, + "tool_call": true + }, + "qwen.qwen3-coder-480b-a35b-v1:0": { + "attachment": false, + "cost": { + "input": 0.22, + "output": 1.8 + }, + "id": "qwen.qwen3-coder-480b-a35b-v1:0", + "knowledge": "2024-04", + "last_updated": "2025-09-18", + "limit": { + "context": 131072, + "output": 65536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-18", + "temperature": true, + "tool_call": true + } + }, + "name": "Amazon Bedrock", + "npm": "@ai-sdk/amazon-bedrock" + }, + "anthropic": { + "doc": "https://docs.anthropic.com/en/docs/about-claude/models", + "env": [ + "ANTHROPIC_API_KEY" + ], + "id": "anthropic", + "models": { + "claude-3-5-haiku-20241022": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "cache_write": 1, + "input": 0.8, + "output": 4 + }, + "id": "claude-3-5-haiku-20241022", + "knowledge": "2024-07-31", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 3.5", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "claude-3-5-haiku-latest": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "cache_write": 1, + "input": 0.8, + "output": 4 + }, + "id": "claude-3-5-haiku-latest", + "knowledge": "2024-07-31", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 3.5 (latest)", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "claude-3-5-sonnet-20240620": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-3-5-sonnet-20240620", + "knowledge": "2024-04-30", + "last_updated": "2024-06-20", + "limit": { + "context": 
200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.5", + "open_weights": false, + "reasoning": false, + "release_date": "2024-06-20", + "temperature": true, + "tool_call": true + }, + "claude-3-5-sonnet-20241022": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-3-5-sonnet-20241022", + "knowledge": "2024-04-30", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.5 v2", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "claude-3-7-sonnet-20250219": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-3-7-sonnet-20250219", + "knowledge": "2024-10-31", + "last_updated": "2025-02-19", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.7", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-19", + "temperature": true, + "tool_call": true + }, + "claude-3-7-sonnet-latest": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-3-7-sonnet-latest", + "knowledge": "2024-10-31", + "last_updated": "2025-02-19", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.7 (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-19", + "temperature": true, + "tool_call": true + }, + "claude-3-haiku-20240307": { + "attachment": true, + "cost": { + "cache_read": 0.03, + "cache_write": 0.3, + "input": 0.25, + "output": 1.25 + }, + "id": "claude-3-haiku-20240307", + "knowledge": "2023-08-31", + "last_updated": "2024-03-13", + "limit": { + "context": 200000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 3", + "open_weights": false, + "reasoning": false, + "release_date": "2024-03-13", + "temperature": true, + "tool_call": true + }, + "claude-3-opus-20240229": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "claude-3-opus-20240229", + "knowledge": "2023-08-31", + "last_updated": "2024-02-29", + "limit": { + "context": 200000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 3", + "open_weights": false, + "reasoning": false, + "release_date": "2024-02-29", + "temperature": true, + "tool_call": true + }, + "claude-3-sonnet-20240229": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 0.3, + "input": 3, + "output": 15 + }, + "id": "claude-3-sonnet-20240229", + "knowledge": "2023-08-31", + "last_updated": "2024-03-04", + "limit": { + "context": 200000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3", + "open_weights": false, + "reasoning": false, + "release_date": "2024-03-04", + "temperature": true, + "tool_call": true + }, + 
"claude-haiku-4-5": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "cache_write": 1.25, + "input": 1, + "output": 5 + }, + "id": "claude-haiku-4-5", + "knowledge": "2025-02-31", + "last_updated": "2025-10-15", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 4.5 (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-15", + "temperature": true, + "tool_call": true + }, + "claude-haiku-4-5-20251001": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "cache_write": 1.25, + "input": 1, + "output": 5 + }, + "id": "claude-haiku-4-5-20251001", + "knowledge": "2025-02-31", + "last_updated": "2025-10-15", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-15", + "temperature": true, + "tool_call": true + }, + "claude-opus-4-0": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "claude-opus-4-0", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4 (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "claude-opus-4-1": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "claude-opus-4-1", + "knowledge": "2025-03-31", + "last_updated": "2025-08-05", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4.1 (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "claude-opus-4-1-20250805": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "claude-opus-4-1-20250805", + "knowledge": "2025-03-31", + "last_updated": "2025-08-05", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4.1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "claude-opus-4-20250514": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "claude-opus-4-20250514", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "claude-sonnet-4-0": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-sonnet-4-0", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + 
"image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4 (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "claude-sonnet-4-20250514": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-sonnet-4-20250514", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "claude-sonnet-4-5": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-sonnet-4-5", + "knowledge": "2025-07-31", + "last_updated": "2025-09-29", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4.5 (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-29", + "temperature": true, + "tool_call": true + }, + "claude-sonnet-4-5-20250929": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-sonnet-4-5-20250929", + "knowledge": "2025-07-31", + "last_updated": "2025-09-29", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-29", + "temperature": true, + "tool_call": true + } + }, + "name": "Anthropic", + "npm": "@ai-sdk/anthropic" + }, + "azure": { + "doc": "https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models", + "env": [ + "AZURE_RESOURCE_NAME", + "AZURE_API_KEY" + ], + "id": "azure", + "models": { + "codex-mini": { + "attachment": true, + "cost": { + "cache_read": 0.375, + "input": 1.5, + "output": 6 + }, + "id": "codex-mini", + "knowledge": "2024-04", + "last_updated": "2025-05-16", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Codex Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-16", + "temperature": false, + "tool_call": true + }, + "gpt-3.5-turbo-0125": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 1.5 + }, + "id": "gpt-3.5-turbo-0125", + "knowledge": "2021-08", + "last_updated": "2024-01-25", + "limit": { + "context": 16384, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-3.5 Turbo 0125", + "open_weights": false, + "reasoning": false, + "release_date": "2024-01-25", + "temperature": true, + "tool_call": false + }, + "gpt-3.5-turbo-0301": { + "attachment": false, + "cost": { + "input": 1.5, + "output": 2 + }, + "id": "gpt-3.5-turbo-0301", + "knowledge": "2021-08", + "last_updated": "2023-03-01", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-3.5 Turbo 0301", + "open_weights": false, + "reasoning": false, + "release_date": "2023-03-01", + "temperature": true, + "tool_call": false + }, + "gpt-3.5-turbo-0613": { + 
"attachment": false, + "cost": { + "input": 3, + "output": 4 + }, + "id": "gpt-3.5-turbo-0613", + "knowledge": "2021-08", + "last_updated": "2023-06-13", + "limit": { + "context": 16384, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-3.5 Turbo 0613", + "open_weights": false, + "reasoning": false, + "release_date": "2023-06-13", + "temperature": true, + "tool_call": false + }, + "gpt-3.5-turbo-1106": { + "attachment": false, + "cost": { + "input": 1, + "output": 2 + }, + "id": "gpt-3.5-turbo-1106", + "knowledge": "2021-08", + "last_updated": "2023-11-06", + "limit": { + "context": 16384, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-3.5 Turbo 1106", + "open_weights": false, + "reasoning": false, + "release_date": "2023-11-06", + "temperature": true, + "tool_call": false + }, + "gpt-3.5-turbo-instruct": { + "attachment": false, + "cost": { + "input": 1.5, + "output": 2 + }, + "id": "gpt-3.5-turbo-instruct", + "knowledge": "2021-08", + "last_updated": "2023-09-21", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-3.5 Turbo Instruct", + "open_weights": false, + "reasoning": false, + "release_date": "2023-09-21", + "temperature": true, + "tool_call": false + }, + "gpt-4": { + "attachment": false, + "cost": { + "input": 60, + "output": 120 + }, + "id": "gpt-4", + "knowledge": "2023-11", + "last_updated": "2023-03-14", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4", + "open_weights": false, + "reasoning": false, + "release_date": "2023-03-14", + "temperature": true, + "tool_call": true + }, + "gpt-4-32k": { + "attachment": false, + "cost": { + "input": 60, + "output": 120 + }, + "id": "gpt-4-32k", + "knowledge": "2023-11", + "last_updated": "2023-03-14", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4 32K", + "open_weights": false, + "reasoning": false, + "release_date": "2023-03-14", + "temperature": true, + "tool_call": true + }, + "gpt-4-turbo": { + "attachment": true, + "cost": { + "input": 10, + "output": 30 + }, + "id": "gpt-4-turbo", + "knowledge": "2023-11", + "last_updated": "2024-04-09", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4 Turbo", + "open_weights": false, + "reasoning": false, + "release_date": "2023-11-06", + "temperature": true, + "tool_call": true + }, + "gpt-4-turbo-vision": { + "attachment": true, + "cost": { + "input": 10, + "output": 30 + }, + "id": "gpt-4-turbo-vision", + "knowledge": "2023-11", + "last_updated": "2024-04-09", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4 Turbo Vision", + "open_weights": false, + "reasoning": false, + "release_date": "2023-11-06", + "temperature": true, + "tool_call": true + }, + "gpt-4.1": { + "attachment": true, + "cost": { + "cache_read": 0.5, + "input": 2, + "output": 8 + }, + "id": "gpt-4.1", + "knowledge": "2024-05", + "last_updated": "2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ 
+ "text" + ] + }, + "name": "GPT-4.1", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "gpt-4.1-mini": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "input": 0.4, + "output": 1.6 + }, + "id": "gpt-4.1-mini", + "knowledge": "2024-05", + "last_updated": "2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4.1 mini", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "gpt-4.1-nano": { + "attachment": true, + "cost": { + "cache_read": 0.03, + "input": 0.1, + "output": 0.4 + }, + "id": "gpt-4.1-nano", + "knowledge": "2024-05", + "last_updated": "2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4.1 nano", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "gpt-4o": { + "attachment": true, + "cost": { + "cache_read": 1.25, + "input": 2.5, + "output": 10 + }, + "id": "gpt-4o", + "knowledge": "2023-09", + "last_updated": "2024-05-13", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o", + "open_weights": false, + "reasoning": false, + "release_date": "2024-05-13", + "temperature": true, + "tool_call": true + }, + "gpt-4o-mini": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "input": 0.15, + "output": 0.6 + }, + "id": "gpt-4o-mini", + "knowledge": "2023-09", + "last_updated": "2024-07-18", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o mini", + "open_weights": false, + "reasoning": false, + "release_date": "2024-07-18", + "temperature": true, + "tool_call": true + }, + "gpt-5": { + "attachment": true, + "cost": { + "cache_read": 0.13, + "input": 1.25, + "output": 10 + }, + "id": "gpt-5", + "knowledge": "2024-09-30", + "last_updated": "2025-08-07", + "limit": { + "context": 272000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": false, + "tool_call": true + }, + "gpt-5-chat": { + "attachment": true, + "cost": { + "cache_read": 0.13, + "input": 1.25, + "output": 10 + }, + "id": "gpt-5-chat", + "knowledge": "2024-10-24", + "last_updated": "2025-08-07", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Chat", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": false, + "tool_call": false + }, + "gpt-5-codex": { + "attachment": false, + "id": "gpt-5-codex", + "knowledge": "2024-09-30", + "last_updated": "2025-09-15", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5-Codex", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-15", + "temperature": false, + "tool_call": true + }, + "gpt-5-mini": { + "attachment": true, + 
"cost": { + "cache_read": 0.03, + "input": 0.25, + "output": 2 + }, + "id": "gpt-5-mini", + "knowledge": "2024-05-30", + "last_updated": "2025-08-07", + "limit": { + "context": 272000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": false, + "tool_call": true + }, + "gpt-5-nano": { + "attachment": true, + "cost": { + "cache_read": 0.01, + "input": 0.05, + "output": 0.4 + }, + "id": "gpt-5-nano", + "knowledge": "2024-05-30", + "last_updated": "2025-08-07", + "limit": { + "context": 272000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Nano", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": false, + "tool_call": true + }, + "o1": { + "attachment": false, + "cost": { + "cache_read": 7.5, + "input": 15, + "output": 60 + }, + "id": "o1", + "knowledge": "2023-09", + "last_updated": "2024-12-05", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o1", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-05", + "temperature": false, + "tool_call": true + }, + "o1-mini": { + "attachment": false, + "cost": { + "cache_read": 0.55, + "input": 1.1, + "output": 4.4 + }, + "id": "o1-mini", + "knowledge": "2023-09", + "last_updated": "2024-09-12", + "limit": { + "context": 128000, + "output": 65536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "o1-mini", + "open_weights": false, + "reasoning": true, + "release_date": "2024-09-12", + "temperature": false, + "tool_call": true + }, + "o1-preview": { + "attachment": false, + "cost": { + "cache_read": 8.25, + "input": 16.5, + "output": 66 + }, + "id": "o1-preview", + "knowledge": "2023-09", + "last_updated": "2024-09-12", + "limit": { + "context": 128000, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "o1-preview", + "open_weights": false, + "reasoning": true, + "release_date": "2024-09-12", + "temperature": false, + "tool_call": true + }, + "o3": { + "attachment": true, + "cost": { + "cache_read": 0.5, + "input": 2, + "output": 8 + }, + "id": "o3", + "knowledge": "2024-05", + "last_updated": "2025-04-16", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o3", + "open_weights": false, + "reasoning": true, + "release_date": "2025-04-16", + "temperature": false, + "tool_call": true + }, + "o3-mini": { + "attachment": false, + "cost": { + "cache_read": 0.55, + "input": 1.1, + "output": 4.4 + }, + "id": "o3-mini", + "knowledge": "2024-05", + "last_updated": "2025-01-29", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "o3-mini", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-20", + "temperature": false, + "tool_call": true + }, + "o4-mini": { + "attachment": true, + "cost": { + "cache_read": 0.28, + "input": 1.1, + "output": 4.4 + }, + "id": "o4-mini", + "knowledge": "2024-05", + "last_updated": "2025-04-16", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + 
"text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o4-mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-04-16", + "temperature": false, + "tool_call": true + } + }, + "name": "Azure", + "npm": "@ai-sdk/azure" + }, + "baseten": { + "api": "https://inference.baseten.co/v1", + "doc": "https://docs.baseten.co/development/model-apis/overview", + "env": [ + "BASETEN_API_KEY" + ], + "id": "baseten", + "models": { + "Qwen3/Qwen3-Coder-480B-A35B-Instruct": { + "attachment": false, + "cost": { + "input": 0.38, + "output": 1.53 + }, + "id": "Qwen3/Qwen3-Coder-480B-A35B-Instruct", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "moonshotai/Kimi-K2-Instruct-0905": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.5 + }, + "id": "moonshotai/Kimi-K2-Instruct-0905", + "knowledge": "2025-08", + "last_updated": "2025-09-05", + "limit": { + "context": 262144, + "output": 262144 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct 0905", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-05", + "temperature": true, + "tool_call": true + }, + "zai-org/GLM-4.6": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.2 + }, + "id": "zai-org/GLM-4.6", + "knowledge": "2025-08", + "last_updated": "2025-16-09", + "limit": { + "context": 200000, + "output": 200000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.6", + "open_weights": true, + "reasoning": false, + "release_date": "2025-16-09", + "temperature": true, + "tool_call": true + } + }, + "name": "Baseten", + "npm": "@ai-sdk/openai-compatible" + }, + "cerebras": { + "doc": "https://inference-docs.cerebras.ai/models/overview", + "env": [ + "CEREBRAS_API_KEY" + ], + "id": "cerebras", + "models": { + "gpt-oss-120b": { + "attachment": false, + "cost": { + "input": 0.25, + "output": 0.69 + }, + "id": "gpt-oss-120b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 120B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "qwen-3-235b-a22b-instruct-2507": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 1.2 + }, + "id": "qwen-3-235b-a22b-instruct-2507", + "knowledge": "2025-04", + "last_updated": "2025-07-22", + "limit": { + "context": 131000, + "output": 32000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen 3 235B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-22", + "temperature": true, + "tool_call": true + }, + "qwen-3-coder-480b": { + "attachment": false, + "cost": { + "input": 2, + "output": 2 + }, + "id": "qwen-3-coder-480b", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 131000, + "output": 32000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen 3 Coder 480B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, 
+ "tool_call": true + }, + "zai-glm-4.6": { + "attachment": false, + "cost": { + "cache_read": 0, + "cache_write": 0, + "input": 0, + "output": 0 + }, + "id": "zai-glm-4.6", + "last_updated": "2025-11-05", + "limit": { + "context": 131072, + "output": 40960 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Z.AI GLM-4.6", + "open_weights": true, + "reasoning": false, + "release_date": "2025-11-05", + "temperature": true, + "tool_call": true + } + }, + "name": "Cerebras", + "npm": "@ai-sdk/cerebras" + }, + "cloudflare-workers-ai": { + "doc": "https://developers.cloudflare.com/workers-ai/models/", + "env": [ + "CLOUDFLARE_ACCOUNT_ID", + "CLOUDFLARE_API_KEY" + ], + "id": "cloudflare-workers-ai", + "models": { + "aura-1": { + "attachment": false, + "cost": { + "input": 0.015, + "output": 0.015 + }, + "id": "aura-1", + "last_updated": "2025-07-07", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "audio" + ] + }, + "name": "@cf/deepgram/aura-1", + "open_weights": true, + "reasoning": false, + "release_date": "2025-08-27", + "temperature": false, + "tool_call": false + }, + "bart-large-cnn": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "bart-large-cnn", + "last_updated": "2024-02-13", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/facebook/bart-large-cnn", + "open_weights": true, + "reasoning": false, + "release_date": "2022-03-02", + "temperature": false, + "tool_call": false + }, + "deepseek-coder-6.7b-base-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek-coder-6.7b-base-awq", + "last_updated": "2023-11-09", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/deepseek-coder-6.7b-base-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-11-05", + "temperature": true, + "tool_call": true + }, + "deepseek-coder-6.7b-instruct-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek-coder-6.7b-instruct-awq", + "last_updated": "2023-11-13", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/deepseek-coder-6.7b-instruct-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-11-05", + "temperature": true, + "tool_call": true + }, + "deepseek-math-7b-instruct": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek-math-7b-instruct", + "last_updated": "2024-02-06", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/deepseek-ai/deepseek-math-7b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-05", + "temperature": true, + "tool_call": true + }, + "deepseek-r1-distill-qwen-32b": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 4.88 + }, + "id": "deepseek-r1-distill-qwen-32b", + "last_updated": "2025-02-24", + "limit": { + "context": 80000, + "output": 80000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b", + "open_weights": true, + "reasoning": true, + "release_date": "2025-01-20", + 
"temperature": true, + "tool_call": true + }, + "discolm-german-7b-v1-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "discolm-german-7b-v1-awq", + "last_updated": "2024-01-24", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/thebloke/discolm-german-7b-v1-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2024-01-18", + "temperature": true, + "tool_call": true + }, + "dreamshaper-8-lcm": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "dreamshaper-8-lcm", + "last_updated": "2023-12-07", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "@cf/lykon/dreamshaper-8-lcm", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-06", + "temperature": false, + "tool_call": false + }, + "falcon-7b-instruct": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "falcon-7b-instruct", + "last_updated": "2024-10-12", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/tiiuae/falcon-7b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2023-04-25", + "temperature": true, + "tool_call": true + }, + "flux-1-schnell": { + "attachment": false, + "cost": { + "input": 5.3e-05, + "output": 0.00011 + }, + "id": "flux-1-schnell", + "last_updated": "2024-08-16", + "limit": { + "context": 2048, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "@cf/black-forest-labs/flux-1-schnell", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-31", + "temperature": false, + "tool_call": false + }, + "gemma-2b-it-lora": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "gemma-2b-it-lora", + "last_updated": "2024-04-02", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/google/gemma-2b-it-lora", + "open_weights": true, + "reasoning": false, + "release_date": "2024-04-02", + "temperature": true, + "tool_call": true + }, + "gemma-3-12b-it": { + "attachment": false, + "cost": { + "input": 0.35, + "output": 0.56 + }, + "id": "gemma-3-12b-it", + "last_updated": "2025-03-21", + "limit": { + "context": 80000, + "output": 80000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/google/gemma-3-12b-it", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-01", + "temperature": true, + "tool_call": true + }, + "gemma-7b-it": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "gemma-7b-it", + "last_updated": "2024-08-14", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/google/gemma-7b-it", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-13", + "temperature": true, + "tool_call": true + }, + "gemma-7b-it-lora": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "gemma-7b-it-lora", + "last_updated": "2024-04-02", + "limit": { + "context": 3500, + "output": 3500 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/google/gemma-7b-it-lora", 
+ "open_weights": true, + "reasoning": false, + "release_date": "2024-04-02", + "temperature": true, + "tool_call": true + }, + "gpt-oss-120b": { + "attachment": false, + "cost": { + "input": 0.35, + "output": 0.75 + }, + "id": "gpt-oss-120b", + "last_updated": "2025-08-14", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/openai/gpt-oss-120b", + "open_weights": true, + "reasoning": false, + "release_date": "2025-08-04", + "temperature": false, + "tool_call": false + }, + "gpt-oss-20b": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.3 + }, + "id": "gpt-oss-20b", + "last_updated": "2025-08-14", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/openai/gpt-oss-20b", + "open_weights": true, + "reasoning": false, + "release_date": "2025-08-04", + "temperature": false, + "tool_call": false + }, + "hermes-2-pro-mistral-7b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "hermes-2-pro-mistral-7b", + "last_updated": "2024-09-08", + "limit": { + "context": 24000, + "output": 24000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/nousresearch/hermes-2-pro-mistral-7b", + "open_weights": true, + "reasoning": false, + "release_date": "2024-03-11", + "temperature": true, + "tool_call": true + }, + "llama-2-13b-chat-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "llama-2-13b-chat-awq", + "last_updated": "2023-11-09", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/llama-2-13b-chat-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-09-19", + "temperature": true, + "tool_call": true + }, + "llama-2-7b-chat-fp16": { + "attachment": false, + "cost": { + "input": 0.56, + "output": 6.67 + }, + "id": "llama-2-7b-chat-fp16", + "last_updated": "2023-07-26", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-2-7b-chat-fp16", + "open_weights": true, + "reasoning": false, + "release_date": "2023-07-26", + "temperature": true, + "tool_call": true + }, + "llama-2-7b-chat-hf-lora": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "llama-2-7b-chat-hf-lora", + "last_updated": "2024-04-17", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta-llama/llama-2-7b-chat-hf-lora", + "open_weights": true, + "reasoning": false, + "release_date": "2023-07-13", + "temperature": true, + "tool_call": true + }, + "llama-2-7b-chat-int8": { + "attachment": false, + "id": "llama-2-7b-chat-int8", + "last_updated": "2023-09-25", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-2-7b-chat-int8", + "open_weights": true, + "reasoning": false, + "release_date": "2023-09-25", + "temperature": true, + "tool_call": true + }, + "llama-3-8b-instruct": { + "attachment": false, + "cost": { + "input": 0.28, + "output": 0.83 + }, + "id": "llama-3-8b-instruct", + "last_updated": "2025-06-19", + "limit": { + "context": 7968, + "output": 7968 + }, + "modalities": 
{ + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3-8b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-04-17", + "temperature": true, + "tool_call": true + }, + "llama-3-8b-instruct-awq": { + "attachment": false, + "cost": { + "input": 0.12, + "output": 0.27 + }, + "id": "llama-3-8b-instruct-awq", + "last_updated": "2024-05-09", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3-8b-instruct-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2024-05-09", + "temperature": true, + "tool_call": true + }, + "llama-3.1-70b-instruct": { + "attachment": false, + "id": "llama-3.1-70b-instruct", + "last_updated": "2024-12-15", + "limit": { + "context": 24000, + "output": 24000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.1-70b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-16", + "temperature": true, + "tool_call": true + }, + "llama-3.1-8b-instruct": { + "attachment": false, + "cost": { + "input": 0.28, + "output": 0.83 + }, + "id": "llama-3.1-8b-instruct", + "last_updated": "2024-09-25", + "limit": { + "context": 7968, + "output": 7968 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.1-8b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-18", + "temperature": true, + "tool_call": true + }, + "llama-3.1-8b-instruct-awq": { + "attachment": false, + "cost": { + "input": 0.12, + "output": 0.27 + }, + "id": "llama-3.1-8b-instruct-awq", + "last_updated": "2024-07-25", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.1-8b-instruct-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-25", + "temperature": true, + "tool_call": true + }, + "llama-3.1-8b-instruct-fast": { + "attachment": false, + "id": "llama-3.1-8b-instruct-fast", + "last_updated": "2024-09-25", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.1-8b-instruct-fast", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-18", + "temperature": true, + "tool_call": true + }, + "llama-3.1-8b-instruct-fp8": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.29 + }, + "id": "llama-3.1-8b-instruct-fp8", + "last_updated": "2024-07-25", + "limit": { + "context": 32000, + "output": 32000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.1-8b-instruct-fp8", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-25", + "temperature": true, + "tool_call": true + }, + "llama-3.2-11b-vision-instruct": { + "attachment": false, + "cost": { + "input": 0.049, + "output": 0.68 + }, + "id": "llama-3.2-11b-vision-instruct", + "last_updated": "2024-12-04", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.2-11b-vision-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-18", + "temperature": true, + "tool_call": true + }, + "llama-3.2-1b-instruct": { + "attachment": false, + 
"cost": { + "input": 0.027, + "output": 0.2 + }, + "id": "llama-3.2-1b-instruct", + "last_updated": "2024-10-24", + "limit": { + "context": 60000, + "output": 60000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.2-1b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-18", + "temperature": true, + "tool_call": true + }, + "llama-3.2-3b-instruct": { + "attachment": false, + "cost": { + "input": 0.051, + "output": 0.34 + }, + "id": "llama-3.2-3b-instruct", + "last_updated": "2024-10-24", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.2-3b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-18", + "temperature": true, + "tool_call": true + }, + "llama-3.3-70b-instruct-fp8-fast": { + "attachment": false, + "cost": { + "input": 0.29, + "output": 2.25 + }, + "id": "llama-3.3-70b-instruct-fp8-fast", + "last_updated": "2024-12-06", + "limit": { + "context": 24000, + "output": 24000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-3.3-70b-instruct-fp8-fast", + "open_weights": true, + "reasoning": false, + "release_date": "2024-12-06", + "temperature": true, + "tool_call": true + }, + "llama-4-scout-17b-16e-instruct": { + "attachment": false, + "cost": { + "input": 0.27, + "output": 0.85 + }, + "id": "llama-4-scout-17b-16e-instruct", + "last_updated": "2025-05-23", + "limit": { + "context": 131000, + "output": 131000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-4-scout-17b-16e-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-02", + "temperature": true, + "tool_call": true + }, + "llama-guard-3-8b": { + "attachment": false, + "cost": { + "input": 0.48, + "output": 0.03 + }, + "id": "llama-guard-3-8b", + "last_updated": "2024-10-11", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/llama-guard-3-8b", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-22", + "temperature": true, + "tool_call": false + }, + "llamaguard-7b-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "llamaguard-7b-awq", + "last_updated": "2023-12-11", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/llamaguard-7b-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-11", + "temperature": true, + "tool_call": true + }, + "llava-1.5-7b-hf": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "llava-1.5-7b-hf", + "last_updated": "2025-06-06", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "image", + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/llava-hf/llava-1.5-7b-hf", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-05", + "temperature": true, + "tool_call": false + }, + "lucid-origin": { + "attachment": false, + "cost": { + "input": 0.007, + "output": 0.007 + }, + "id": "lucid-origin", + "last_updated": "2025-08-05", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": 
"@cf/leonardo/lucid-origin", + "open_weights": false, + "reasoning": false, + "release_date": "2025-08-25", + "temperature": false, + "tool_call": false + }, + "m2m100-1.2b": { + "attachment": false, + "cost": { + "input": 0.34, + "output": 0.34 + }, + "id": "m2m100-1.2b", + "last_updated": "2023-11-16", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/meta/m2m100-1.2b", + "open_weights": true, + "reasoning": false, + "release_date": "2022-03-02", + "temperature": false, + "tool_call": false + }, + "melotts": { + "attachment": true, + "cost": { + "input": 0.0002, + "output": 0 + }, + "id": "melotts", + "last_updated": "2024-07-19", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "audio" + ] + }, + "name": "@cf/myshell-ai/melotts", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-19", + "temperature": false, + "tool_call": false + }, + "mistral-7b-instruct-v0.1": { + "attachment": false, + "cost": { + "input": 0.11, + "output": 0.19 + }, + "id": "mistral-7b-instruct-v0.1", + "last_updated": "2025-07-24", + "limit": { + "context": 2824, + "output": 2824 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/mistral/mistral-7b-instruct-v0.1", + "open_weights": true, + "reasoning": false, + "release_date": "2023-09-27", + "temperature": true, + "tool_call": true + }, + "mistral-7b-instruct-v0.1-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistral-7b-instruct-v0.1-awq", + "last_updated": "2023-11-09", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/mistral-7b-instruct-v0.1-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-09-27", + "temperature": true, + "tool_call": true + }, + "mistral-7b-instruct-v0.2": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistral-7b-instruct-v0.2", + "last_updated": "2025-07-24", + "limit": { + "context": 3072, + "output": 3072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/mistral/mistral-7b-instruct-v0.2", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-11", + "temperature": true, + "tool_call": true + }, + "mistral-7b-instruct-v0.2-lora": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistral-7b-instruct-v0.2-lora", + "last_updated": "2024-04-01", + "limit": { + "context": 15000, + "output": 15000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/mistral/mistral-7b-instruct-v0.2-lora", + "open_weights": true, + "reasoning": false, + "release_date": "2024-04-01", + "temperature": true, + "tool_call": true + }, + "mistral-small-3.1-24b-instruct": { + "attachment": false, + "cost": { + "input": 0.35, + "output": 0.56 + }, + "id": "mistral-small-3.1-24b-instruct", + "last_updated": "2025-07-28", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/mistralai/mistral-small-3.1-24b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-11", + "temperature": true, + "tool_call": true + }, + "neural-chat-7b-v3-1-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 
0 + }, + "id": "neural-chat-7b-v3-1-awq", + "last_updated": "2023-11-17", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/neural-chat-7b-v3-1-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-11-15", + "temperature": true, + "tool_call": true + }, + "nova-3": { + "attachment": false, + "cost": { + "input": 0.0052, + "output": 0.0052 + }, + "id": "nova-3", + "last_updated": "2025-07-08", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "audio" + ], + "output": [ + "text" + ] + }, + "name": "@cf/deepgram/nova-3", + "open_weights": true, + "reasoning": false, + "release_date": "2025-06-05", + "temperature": false, + "tool_call": false + }, + "openchat-3.5-0106": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openchat-3.5-0106", + "last_updated": "2024-05-18", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/openchat/openchat-3.5-0106", + "open_weights": true, + "reasoning": false, + "release_date": "2024-01-07", + "temperature": true, + "tool_call": true + }, + "openhermes-2.5-mistral-7b-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openhermes-2.5-mistral-7b-awq", + "last_updated": "2023-11-09", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/openhermes-2.5-mistral-7b-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-11-02", + "temperature": true, + "tool_call": true + }, + "phi-2": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "phi-2", + "last_updated": "2024-04-29", + "limit": { + "context": 2048, + "output": 2048 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/microsoft/phi-2", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-13", + "temperature": true, + "tool_call": true + }, + "phoenix-1.0": { + "attachment": false, + "cost": { + "input": 0.0058, + "output": 0.0058 + }, + "id": "phoenix-1.0", + "last_updated": "2025-08-25", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "@cf/leonardo/phoenix-1.0", + "open_weights": false, + "reasoning": false, + "release_date": "2025-08-25", + "temperature": false, + "tool_call": false + }, + "qwen1.5-0.5b-chat": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen1.5-0.5b-chat", + "last_updated": "2024-04-30", + "limit": { + "context": 32000, + "output": 32000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/qwen/qwen1.5-0.5b-chat", + "open_weights": true, + "reasoning": false, + "release_date": "2024-01-31", + "temperature": true, + "tool_call": true + }, + "qwen1.5-1.8b-chat": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen1.5-1.8b-chat", + "last_updated": "2024-04-30", + "limit": { + "context": 32000, + "output": 32000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/qwen/qwen1.5-1.8b-chat", + "open_weights": true, + "reasoning": false, + "release_date": "2024-01-30", + "temperature": true, + "tool_call": true + }, + "qwen1.5-14b-chat-awq": { 
+ "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen1.5-14b-chat-awq", + "last_updated": "2024-04-30", + "limit": { + "context": 7500, + "output": 7500 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/qwen/qwen1.5-14b-chat-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-03", + "temperature": true, + "tool_call": true + }, + "qwen1.5-7b-chat-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen1.5-7b-chat-awq", + "last_updated": "2024-04-30", + "limit": { + "context": 20000, + "output": 20000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/qwen/qwen1.5-7b-chat-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-03", + "temperature": true, + "tool_call": true + }, + "qwen2.5-coder-32b-instruct": { + "attachment": false, + "cost": { + "input": 0.66, + "output": 1 + }, + "id": "qwen2.5-coder-32b-instruct", + "last_updated": "2025-01-12", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/qwen/qwen2.5-coder-32b-instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-11-06", + "temperature": true, + "tool_call": true + }, + "qwq-32b": { + "attachment": false, + "cost": { + "input": 0.66, + "output": 1 + }, + "id": "qwq-32b", + "last_updated": "2025-03-11", + "limit": { + "context": 24000, + "output": 24000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/qwen/qwq-32b", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-05", + "temperature": true, + "tool_call": true + }, + "resnet-50": { + "attachment": false, + "cost": { + "input": 2.5e-06, + "output": 0 + }, + "id": "resnet-50", + "last_updated": "2024-02-13", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "image" + ], + "output": [ + "text" + ] + }, + "name": "@cf/microsoft/resnet-50", + "open_weights": true, + "reasoning": false, + "release_date": "2022-03-16", + "temperature": false, + "tool_call": false + }, + "sqlcoder-7b-2": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "sqlcoder-7b-2", + "last_updated": "2024-02-12", + "limit": { + "context": 10000, + "output": 10000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/defog/sqlcoder-7b-2", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-05", + "temperature": true, + "tool_call": true + }, + "stable-diffusion-v1-5-img2img": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "stable-diffusion-v1-5-img2img", + "last_updated": "2024-02-27", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "@cf/runwayml/stable-diffusion-v1-5-img2img", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-27", + "temperature": false, + "tool_call": false + }, + "stable-diffusion-v1-5-inpainting": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "stable-diffusion-v1-5-inpainting", + "last_updated": "2024-02-27", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "@cf/runwayml/stable-diffusion-v1-5-inpainting", + "open_weights": 
true, + "reasoning": false, + "release_date": "2024-02-27", + "temperature": false, + "tool_call": false + }, + "stable-diffusion-xl-base-1.0": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "stable-diffusion-xl-base-1.0", + "last_updated": "2023-10-30", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "@cf/stabilityai/stable-diffusion-xl-base-1.0", + "open_weights": true, + "reasoning": false, + "release_date": "2023-07-25", + "temperature": false, + "tool_call": false + }, + "stable-diffusion-xl-lightning": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "stable-diffusion-xl-lightning", + "last_updated": "2024-04-03", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "@cf/bytedance/stable-diffusion-xl-lightning", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-20", + "temperature": false, + "tool_call": false + }, + "starling-lm-7b-beta": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "starling-lm-7b-beta", + "last_updated": "2024-04-03", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/nexusflow/starling-lm-7b-beta", + "open_weights": true, + "reasoning": false, + "release_date": "2024-03-19", + "temperature": true, + "tool_call": true + }, + "tinyllama-1.1b-chat-v1.0": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "tinyllama-1.1b-chat-v1.0", + "last_updated": "2024-03-17", + "limit": { + "context": 2048, + "output": 2048 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/tinyllama/tinyllama-1.1b-chat-v1.0", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-30", + "temperature": true, + "tool_call": true + }, + "uform-gen2-qwen-500m": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "uform-gen2-qwen-500m", + "last_updated": "2024-04-24", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "image", + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/unum/uform-gen2-qwen-500m", + "open_weights": true, + "reasoning": false, + "release_date": "2024-02-15", + "temperature": false, + "tool_call": false + }, + "una-cybertron-7b-v2-bf16": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "una-cybertron-7b-v2-bf16", + "last_updated": "2024-03-08", + "limit": { + "context": 15000, + "output": 15000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@cf/fblgit/una-cybertron-7b-v2-bf16", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-02", + "temperature": true, + "tool_call": true + }, + "whisper": { + "attachment": false, + "cost": { + "input": 0.00045, + "output": 0.00045 + }, + "id": "whisper", + "last_updated": "2024-08-12", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "audio" + ], + "output": [ + "text" + ] + }, + "name": "@cf/openai/whisper", + "open_weights": true, + "reasoning": false, + "release_date": "2023-11-07", + "temperature": false, + "tool_call": false + }, + "whisper-large-v3-turbo": { + "attachment": false, + "cost": { + "input": 0.00051, + "output": 0.00051 + }, + "id": "whisper-large-v3-turbo", + 
"last_updated": "2024-10-04", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "audio" + ], + "output": [ + "text" + ] + }, + "name": "@cf/openai/whisper-large-v3-turbo", + "open_weights": true, + "reasoning": false, + "release_date": "2024-10-01", + "temperature": false, + "tool_call": false + }, + "whisper-tiny-en": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "whisper-tiny-en", + "last_updated": "2024-01-22", + "limit": { + "context": 0, + "output": 0 + }, + "modalities": { + "input": [ + "audio" + ], + "output": [ + "text" + ] + }, + "name": "@cf/openai/whisper-tiny-en", + "open_weights": true, + "reasoning": false, + "release_date": "2022-09-26", + "temperature": false, + "tool_call": false + }, + "zephyr-7b-beta-awq": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "zephyr-7b-beta-awq", + "last_updated": "2023-11-09", + "limit": { + "context": 4096, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "@hf/thebloke/zephyr-7b-beta-awq", + "open_weights": true, + "reasoning": false, + "release_date": "2023-10-27", + "temperature": true, + "tool_call": true + } + }, + "name": "Cloudflare Workers AI", + "npm": "workers-ai-provider" + }, + "deepinfra": { + "doc": "https://deepinfra.com/models", + "env": [ + "DEEPINFRA_API_KEY" + ], + "id": "deepinfra", + "models": { + "Qwen/Qwen3-Coder-480B-A35B-Instruct": { + "attachment": false, + "cost": { + "input": 0.4, + "output": 1.6 + }, + "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 1.2 + }, + "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct Turbo", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "moonshotai/Kimi-K2-Instruct": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 2 + }, + "id": "moonshotai/Kimi-K2-Instruct", + "knowledge": "2024-10", + "last_updated": "2025-07-11", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-11", + "temperature": true, + "tool_call": true + }, + "zai-org/GLM-4.5": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.2 + }, + "id": "zai-org/GLM-4.5", + "knowledge": "2025-04", + "last_updated": "2025-07-28", + "limit": { + "context": 131072, + "output": 98304 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM-4.5", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-28", + "temperature": true, + "tool_call": true + } + }, + "name": "Deep Infra", + "npm": "@ai-sdk/deepinfra" + }, + "deepseek": { + "api": 
"https://api.deepseek.com", + "doc": "https://platform.deepseek.com/api-docs/pricing", + "env": [ + "DEEPSEEK_API_KEY" + ], + "id": "deepseek", + "models": { + "deepseek-chat": { + "attachment": true, + "cost": { + "cache_read": 0.07, + "input": 0.57, + "output": 1.68 + }, + "id": "deepseek-chat", + "knowledge": "2024-07", + "last_updated": "2025-08-21", + "limit": { + "context": 128000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek Chat", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-26", + "temperature": true, + "tool_call": true + }, + "deepseek-reasoner": { + "attachment": true, + "cost": { + "cache_read": 0.07, + "input": 0.57, + "output": 1.68 + }, + "id": "deepseek-reasoner", + "knowledge": "2024-07", + "last_updated": "2025-08-21", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek Reasoner", + "open_weights": false, + "reasoning": true, + "release_date": "2025-01-20", + "temperature": true, + "tool_call": true + } + }, + "name": "DeepSeek", + "npm": "@ai-sdk/openai-compatible" + }, + "fireworks-ai": { + "api": "https://api.fireworks.ai/inference/v1/", + "doc": "https://fireworks.ai/docs/", + "env": [ + "FIREWORKS_API_KEY" + ], + "id": "fireworks-ai", + "models": { + "accounts/fireworks/models/deepseek-r1-0528": { + "attachment": false, + "cost": { + "input": 3, + "output": 8 + }, + "id": "accounts/fireworks/models/deepseek-r1-0528", + "knowledge": "2025-05", + "last_updated": "2025-05-28", + "limit": { + "context": 160000, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Deepseek R1 05/28", + "open_weights": true, + "reasoning": true, + "release_date": "2025-05-28", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/deepseek-v3-0324": { + "attachment": false, + "cost": { + "input": 0.9, + "output": 0.9 + }, + "id": "accounts/fireworks/models/deepseek-v3-0324", + "knowledge": "2024-10", + "last_updated": "2025-03-24", + "limit": { + "context": 160000, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Deepseek V3 03-24", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-24", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/deepseek-v3p1": { + "attachment": false, + "cost": { + "input": 0.56, + "output": 1.68 + }, + "id": "accounts/fireworks/models/deepseek-v3p1", + "knowledge": "2025-07", + "last_updated": "2025-08-21", + "limit": { + "context": 163840, + "output": 163840 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3.1", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-21", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/glm-4p5": { + "attachment": false, + "cost": { + "input": 0.55, + "output": 2.19 + }, + "id": "accounts/fireworks/models/glm-4p5", + "knowledge": "2025-04", + "last_updated": "2025-07-29", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-29", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/glm-4p5-air": { + "attachment": false, + "cost": { + "input": 0.22, 
+ "output": 0.88 + }, + "id": "accounts/fireworks/models/glm-4p5-air", + "knowledge": "2025-04", + "last_updated": "2025-08-01", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5 Air", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-01", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/gpt-oss-120b": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.6 + }, + "id": "accounts/fireworks/models/gpt-oss-120b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 120B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/gpt-oss-20b": { + "attachment": false, + "cost": { + "input": 0.05, + "output": 0.2 + }, + "id": "accounts/fireworks/models/gpt-oss-20b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 20B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/kimi-k2-instruct": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "accounts/fireworks/models/kimi-k2-instruct", + "knowledge": "2024-10", + "last_updated": "2025-07-11", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-11", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/qwen3-235b-a22b": { + "attachment": false, + "cost": { + "input": 0.22, + "output": 0.88 + }, + "id": "accounts/fireworks/models/qwen3-235b-a22b", + "knowledge": "2025-04", + "last_updated": "2025-04-29", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 235B-A22B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-29", + "temperature": true, + "tool_call": true + }, + "accounts/fireworks/models/qwen3-coder-480b-a35b-instruct": { + "attachment": false, + "cost": { + "input": 0.45, + "output": 1.8 + }, + "id": "accounts/fireworks/models/qwen3-coder-480b-a35b-instruct", + "last_updated": "2025-07-22", + "limit": { + "context": 256000, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-22", + "temperature": true, + "tool_call": true + } + }, + "name": "Fireworks AI", + "npm": "@ai-sdk/openai-compatible" + }, + "google": { + "doc": "https://ai.google.dev/gemini-api/docs/pricing", + "env": [ + "GOOGLE_GENERATIVE_AI_API_KEY", + "GEMINI_API_KEY" + ], + "id": "google", + "models": { + "gemini-1.5-flash": { + "attachment": true, + "cost": { + "cache_read": 0.01875, + "input": 0.075, + "output": 0.3 + }, + "id": "gemini-1.5-flash", + "knowledge": "2024-04", + "last_updated": "2024-05-14", + "limit": { + "context": 1000000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video" + ], 
+ "output": [ + "text" + ] + }, + "name": "Gemini 1.5 Flash", + "open_weights": false, + "reasoning": false, + "release_date": "2024-05-14", + "temperature": true, + "tool_call": true + }, + "gemini-1.5-flash-8b": { + "attachment": true, + "cost": { + "cache_read": 0.01, + "input": 0.0375, + "output": 0.15 + }, + "id": "gemini-1.5-flash-8b", + "knowledge": "2024-04", + "last_updated": "2024-10-03", + "limit": { + "context": 1000000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 1.5 Flash-8B", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-03", + "temperature": true, + "tool_call": true + }, + "gemini-1.5-pro": { + "attachment": true, + "cost": { + "cache_read": 0.3125, + "input": 1.25, + "output": 5 + }, + "id": "gemini-1.5-pro", + "knowledge": "2024-04", + "last_updated": "2024-02-15", + "limit": { + "context": 1000000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 1.5 Pro", + "open_weights": false, + "reasoning": false, + "release_date": "2024-02-15", + "temperature": true, + "tool_call": true + }, + "gemini-2.0-flash": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-2.0-flash", + "knowledge": "2024-06", + "last_updated": "2024-12-11", + "limit": { + "context": 1048576, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.0 Flash", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-11", + "temperature": true, + "tool_call": true + }, + "gemini-2.0-flash-lite": { + "attachment": true, + "cost": { + "input": 0.075, + "output": 0.3 + }, + "id": "gemini-2.0-flash-lite", + "knowledge": "2024-06", + "last_updated": "2024-12-11", + "limit": { + "context": 1048576, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.0 Flash Lite", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-11", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "input": 0.3, + "input_audio": 1, + "output": 2.5 + }, + "id": "gemini-2.5-flash", + "knowledge": "2025-01", + "last_updated": "2025-06-05", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash", + "open_weights": false, + "reasoning": true, + "release_date": "2025-03-20", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-image": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "input": 0.3, + "output": 30 + }, + "id": "gemini-2.5-flash-image", + "knowledge": "2025-06", + "last_updated": "2025-08-26", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text", + "image" + ] + }, + "name": "Gemini 2.5 Flash Image", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-26", + "temperature": true, + "tool_call": false + }, + "gemini-2.5-flash-image-preview": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "input": 0.3, + "output": 30 + }, + "id": 
"gemini-2.5-flash-image-preview", + "knowledge": "2025-06", + "last_updated": "2025-08-26", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text", + "image" + ] + }, + "name": "Gemini 2.5 Flash Image (Preview)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-26", + "temperature": true, + "tool_call": false + }, + "gemini-2.5-flash-lite": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-2.5-flash-lite", + "knowledge": "2025-01", + "last_updated": "2025-06-17", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-lite-preview-06-17": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "input_audio": 0.3, + "output": 0.4 + }, + "id": "gemini-2.5-flash-lite-preview-06-17", + "knowledge": "2025-01", + "last_updated": "2025-06-17", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite Preview 06-17", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-lite-preview-09-2025": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-2.5-flash-lite-preview-09-2025", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite Preview 09-25", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-preview-04-17": { + "attachment": true, + "cost": { + "cache_read": 0.0375, + "input": 0.15, + "output": 0.6 + }, + "id": "gemini-2.5-flash-preview-04-17", + "knowledge": "2025-01", + "last_updated": "2025-04-17", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Preview 04-17", + "open_weights": false, + "reasoning": true, + "release_date": "2025-04-17", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-preview-05-20": { + "attachment": true, + "cost": { + "cache_read": 0.0375, + "input": 0.15, + "output": 0.6 + }, + "id": "gemini-2.5-flash-preview-05-20", + "knowledge": "2025-01", + "last_updated": "2025-05-20", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Preview 05-20", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-20", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-preview-09-2025": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "input": 0.3, + "input_audio": 1, + "output": 2.5 + }, + "id": "gemini-2.5-flash-preview-09-2025", + 
"knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Preview 09-25", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-preview-tts": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 10 + }, + "id": "gemini-2.5-flash-preview-tts", + "knowledge": "2025-01", + "last_updated": "2025-05-01", + "limit": { + "context": 8000, + "output": 16000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "audio" + ] + }, + "name": "Gemini 2.5 Flash Preview TTS", + "open_weights": false, + "reasoning": false, + "release_date": "2025-05-01", + "temperature": false, + "tool_call": false + }, + "gemini-2.5-pro": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "gemini-2.5-pro", + "knowledge": "2025-01", + "last_updated": "2025-06-05", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro", + "open_weights": false, + "reasoning": true, + "release_date": "2025-03-20", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-pro-preview-05-06": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "gemini-2.5-pro-preview-05-06", + "knowledge": "2025-01", + "last_updated": "2025-05-06", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro Preview 05-06", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-06", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-pro-preview-06-05": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "gemini-2.5-pro-preview-06-05", + "knowledge": "2025-01", + "last_updated": "2025-06-05", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro Preview 06-05", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-05", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-pro-preview-tts": { + "attachment": false, + "cost": { + "input": 1, + "output": 20 + }, + "id": "gemini-2.5-pro-preview-tts", + "knowledge": "2025-01", + "last_updated": "2025-05-01", + "limit": { + "context": 8000, + "output": 16000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "audio" + ] + }, + "name": "Gemini 2.5 Pro Preview TTS", + "open_weights": false, + "reasoning": false, + "release_date": "2025-05-01", + "temperature": false, + "tool_call": false + }, + "gemini-embedding-001": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0 + }, + "id": "gemini-embedding-001", + "knowledge": "2025-05", + "last_updated": "2025-05-20", + "limit": { + "context": 2048, + "output": 3072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Gemini Embedding 001", + "open_weights": false, + "reasoning": false, + "release_date": "2025-05-20", + "temperature": false, + 
"tool_call": false + }, + "gemini-flash-latest": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "input": 0.3, + "input_audio": 1, + "output": 2.5 + }, + "id": "gemini-flash-latest", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini Flash Latest", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "gemini-flash-lite-latest": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-flash-lite-latest", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini Flash-Lite Latest", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "gemini-live-2.5-flash": { + "attachment": true, + "cost": { + "input": 0.5, + "input_audio": 3, + "output": 2, + "output_audio": 12 + }, + "id": "gemini-live-2.5-flash", + "knowledge": "2025-01", + "last_updated": "2025-09-01", + "limit": { + "context": 128000, + "output": 8000 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video" + ], + "output": [ + "text", + "audio" + ] + }, + "name": "Gemini Live 2.5 Flash", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-01", + "temperature": true, + "tool_call": true + }, + "gemini-live-2.5-flash-preview-native-audio": { + "attachment": false, + "cost": { + "input": 0.5, + "input_audio": 3, + "output": 2, + "output_audio": 12 + }, + "id": "gemini-live-2.5-flash-preview-native-audio", + "knowledge": "2025-01", + "last_updated": "2025-09-18", + "limit": { + "context": 131072, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "audio", + "video" + ], + "output": [ + "text", + "audio" + ] + }, + "name": "Gemini Live 2.5 Flash Preview Native Audio", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": false, + "tool_call": true + } + }, + "name": "Google", + "npm": "@ai-sdk/google" + }, + "google-vertex": { + "doc": "https://cloud.google.com/vertex-ai/generative-ai/docs/models", + "env": [ + "GOOGLE_VERTEX_PROJECT", + "GOOGLE_VERTEX_LOCATION", + "GOOGLE_APPLICATION_CREDENTIALS" + ], + "id": "google-vertex", + "models": { + "gemini-2.0-flash": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-2.0-flash", + "knowledge": "2024-06", + "last_updated": "2024-12-11", + "limit": { + "context": 1048576, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.0 Flash", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-11", + "temperature": true, + "tool_call": true + }, + "gemini-2.0-flash-lite": { + "attachment": true, + "cost": { + "input": 0.075, + "output": 0.3 + }, + "id": "gemini-2.0-flash-lite", + "knowledge": "2024-06", + "last_updated": "2024-12-11", + "limit": { + "context": 1048576, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": 
"Gemini 2.0 Flash Lite", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-11", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "cache_write": 0.383, + "input": 0.3, + "output": 2.5 + }, + "id": "gemini-2.5-flash", + "knowledge": "2025-01", + "last_updated": "2025-06-17", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-lite": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-2.5-flash-lite", + "knowledge": "2025-01", + "last_updated": "2025-06-17", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-lite-preview-06-17": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-2.5-flash-lite-preview-06-17", + "knowledge": "2025-01", + "last_updated": "2025-06-17", + "limit": { + "context": 65536, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite Preview 06-17", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-lite-preview-09-2025": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "gemini-2.5-flash-lite-preview-09-2025", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite Preview 09-25", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-preview-04-17": { + "attachment": true, + "cost": { + "cache_read": 0.0375, + "input": 0.15, + "output": 0.6 + }, + "id": "gemini-2.5-flash-preview-04-17", + "knowledge": "2025-01", + "last_updated": "2025-04-17", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Preview 04-17", + "open_weights": false, + "reasoning": true, + "release_date": "2025-04-17", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-preview-05-20": { + "attachment": true, + "cost": { + "cache_read": 0.0375, + "input": 0.15, + "output": 0.6 + }, + "id": "gemini-2.5-flash-preview-05-20", + "knowledge": "2025-01", + "last_updated": "2025-05-20", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Preview 05-20", + "open_weights": false, + "reasoning": true, + "release_date": 
"2025-05-20", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-flash-preview-09-2025": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "cache_write": 0.383, + "input": 0.3, + "output": 2.5 + }, + "id": "gemini-2.5-flash-preview-09-2025", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Preview 09-25", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-pro": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "gemini-2.5-pro", + "knowledge": "2025-01", + "last_updated": "2025-06-05", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro", + "open_weights": false, + "reasoning": true, + "release_date": "2025-03-20", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-pro-preview-05-06": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "gemini-2.5-pro-preview-05-06", + "knowledge": "2025-01", + "last_updated": "2025-05-06", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro Preview 05-06", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-06", + "temperature": true, + "tool_call": true + }, + "gemini-2.5-pro-preview-06-05": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "gemini-2.5-pro-preview-06-05", + "knowledge": "2025-01", + "last_updated": "2025-06-05", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro Preview 06-05", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-05", + "temperature": true, + "tool_call": true + }, + "gemini-embedding-001": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0 + }, + "id": "gemini-embedding-001", + "knowledge": "2025-05", + "last_updated": "2025-05-20", + "limit": { + "context": 2048, + "output": 3072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Gemini Embedding 001", + "open_weights": false, + "reasoning": false, + "release_date": "2025-05-20", + "temperature": false, + "tool_call": false + }, + "gemini-flash-latest": { + "attachment": true, + "cost": { + "cache_read": 0.075, + "cache_write": 0.383, + "input": 0.3, + "output": 2.5 + }, + "id": "gemini-flash-latest", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini Flash Latest", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "gemini-flash-lite-latest": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": 
"gemini-flash-lite-latest", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini Flash-Lite Latest", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + } + }, + "name": "Vertex", + "npm": "@ai-sdk/google-vertex" + }, + "google-vertex-anthropic": { + "doc": "https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/claude", + "env": [ + "GOOGLE_VERTEX_PROJECT", + "GOOGLE_VERTEX_LOCATION", + "GOOGLE_APPLICATION_CREDENTIALS" + ], + "id": "google-vertex-anthropic", + "models": { + "claude-3-5-haiku@20241022": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "cache_write": 1, + "input": 0.8, + "output": 4 + }, + "id": "claude-3-5-haiku@20241022", + "knowledge": "2024-07-31", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 3.5", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "claude-3-5-sonnet@20241022": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-3-5-sonnet@20241022", + "knowledge": "2024-04-30", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.5 v2", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "claude-3-7-sonnet@20250219": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-3-7-sonnet@20250219", + "knowledge": "2024-10-31", + "last_updated": "2025-02-19", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.7", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-19", + "temperature": true, + "tool_call": true + }, + "claude-haiku-4-5@20251001": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "cache_write": 1.25, + "input": 1, + "output": 5 + }, + "id": "claude-haiku-4-5@20251001", + "knowledge": "2025-02-31", + "last_updated": "2025-10-15", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-15", + "temperature": true, + "tool_call": true + }, + "claude-opus-4-1@20250805": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "claude-opus-4-1@20250805", + "knowledge": "2025-03-31", + "last_updated": "2025-08-05", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4.1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "claude-opus-4@20250514": { + "attachment": true, + "cost": { + 
"cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "claude-opus-4@20250514", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "claude-sonnet-4-5@20250929": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-sonnet-4-5@20250929", + "knowledge": "2025-07-31", + "last_updated": "2025-09-29", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-29", + "temperature": true, + "tool_call": true + }, + "claude-sonnet-4@20250514": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "claude-sonnet-4@20250514", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + } + }, + "name": "Vertex (Anthropic)", + "npm": "@ai-sdk/google-vertex" + }, + "groq": { + "doc": "https://console.groq.com/docs/models", + "env": [ + "GROQ_API_KEY" + ], + "id": "groq", + "models": { + "deepseek-r1-distill-llama-70b": { + "attachment": false, + "cost": { + "input": 0.75, + "output": 0.99 + }, + "id": "deepseek-r1-distill-llama-70b", + "knowledge": "2024-07", + "last_updated": "2025-01-20", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek R1 Distill Llama 70B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-01-20", + "temperature": true, + "tool_call": true + }, + "gemma2-9b-it": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.2 + }, + "id": "gemma2-9b-it", + "knowledge": "2024-06", + "last_updated": "2024-06-27", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Gemma 2 9B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-06-27", + "temperature": true, + "tool_call": true + }, + "llama-3.1-8b-instant": { + "attachment": false, + "cost": { + "input": 0.05, + "output": 0.08 + }, + "id": "llama-3.1-8b-instant", + "knowledge": "2023-12", + "last_updated": "2024-07-23", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.1 8B Instant", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-23", + "temperature": true, + "tool_call": true + }, + "llama-3.3-70b-versatile": { + "attachment": false, + "cost": { + "input": 0.59, + "output": 0.79 + }, + "id": "llama-3.3-70b-versatile", + "knowledge": "2023-12", + "last_updated": "2024-12-06", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": 
"Llama 3.3 70B Versatile", + "open_weights": true, + "reasoning": false, + "release_date": "2024-12-06", + "temperature": true, + "tool_call": true + }, + "llama-guard-3-8b": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.2 + }, + "id": "llama-guard-3-8b", + "last_updated": "2024-07-23", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama Guard 3 8B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-23", + "temperature": true, + "tool_call": false + }, + "llama3-70b-8192": { + "attachment": false, + "cost": { + "input": 0.59, + "output": 0.79 + }, + "id": "llama3-70b-8192", + "knowledge": "2023-03", + "last_updated": "2024-04-18", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3 70B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-04-18", + "temperature": true, + "tool_call": true + }, + "llama3-8b-8192": { + "attachment": false, + "cost": { + "input": 0.05, + "output": 0.08 + }, + "id": "llama3-8b-8192", + "knowledge": "2023-03", + "last_updated": "2024-04-18", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3 8B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-04-18", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-4-maverick-17b-128e-instruct": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.6 + }, + "id": "meta-llama/llama-4-maverick-17b-128e-instruct", + "knowledge": "2024-08", + "last_updated": "2025-04-05", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 4 Maverick 17B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-05", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-4-scout-17b-16e-instruct": { + "attachment": false, + "cost": { + "input": 0.11, + "output": 0.34 + }, + "id": "meta-llama/llama-4-scout-17b-16e-instruct", + "knowledge": "2024-08", + "last_updated": "2025-04-05", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 4 Scout 17B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-05", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-guard-4-12b": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.2 + }, + "id": "meta-llama/llama-guard-4-12b", + "last_updated": "2025-04-05", + "limit": { + "context": 131072, + "output": 128 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama Guard 4 12B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-05", + "temperature": true, + "tool_call": false + }, + "mistral-saba-24b": { + "attachment": false, + "cost": { + "input": 0.79, + "output": 0.79 + }, + "id": "mistral-saba-24b", + "knowledge": "2024-08", + "last_updated": "2025-02-06", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Saba 24B", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-06", + "temperature": true, + "tool_call": true + 
}, + "moonshotai/kimi-k2-instruct": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "moonshotai/kimi-k2-instruct", + "knowledge": "2024-10", + "last_updated": "2025-07-14", + "limit": { + "context": 131072, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-14", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2-instruct-0905": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "moonshotai/kimi-k2-instruct-0905", + "knowledge": "2024-10", + "last_updated": "2025-09-05", + "limit": { + "context": 262144, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct 0905", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-05", + "temperature": true, + "tool_call": true + }, + "openai/gpt-oss-120b": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.75 + }, + "id": "openai/gpt-oss-120b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 120B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "openai/gpt-oss-20b": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0.5 + }, + "id": "openai/gpt-oss-20b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 20B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "qwen-qwq-32b": { + "attachment": false, + "cost": { + "input": 0.29, + "output": 0.39 + }, + "id": "qwen-qwq-32b", + "knowledge": "2024-09", + "last_updated": "2024-11-27", + "limit": { + "context": 131072, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen QwQ 32B", + "open_weights": true, + "reasoning": true, + "release_date": "2024-11-27", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-32b": { + "attachment": false, + "cost": { + "input": 0.29, + "output": 0.59 + }, + "id": "qwen/qwen3-32b", + "knowledge": "2024-11-08", + "last_updated": "2024-12-23", + "limit": { + "context": 131072, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 32B", + "open_weights": true, + "reasoning": true, + "release_date": "2024-12-23", + "temperature": true, + "tool_call": true + } + }, + "name": "Groq", + "npm": "@ai-sdk/groq" + }, + "huggingface": { + "api": "https://router.huggingface.co/v1", + "doc": "https://huggingface.co/docs/inference-providers", + "env": [ + "HF_TOKEN" + ], + "id": "huggingface", + "models": { + "MiniMaxAI/MiniMax-M2": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 1.2 + }, + "id": "MiniMaxAI/MiniMax-M2", + "knowledge": "2025-10", + "last_updated": "2025-10-27", + "limit": { + "context": 204800, + "output": 204800 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "MiniMax-M2", + "open_weights": true, + "reasoning": true, + "release_date": "2025-10-27", + "temperature": true, + "tool_call": true + }, + 
"Qwen/Qwen3-235B-A22B-Thinking-2507": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 3 + }, + "id": "Qwen/Qwen3-235B-A22B-Thinking-2507", + "knowledge": "2025-04", + "last_updated": "2025-07-25", + "limit": { + "context": 262144, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3-235B-A22B-Thinking-2507", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-25", + "temperature": true, + "tool_call": true + }, + "Qwen/Qwen3-Coder-480B-A35B-Instruct": { + "attachment": false, + "cost": { + "input": 2, + "output": 2 + }, + "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3-Coder-480B-A35B-Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "Qwen/Qwen3-Embedding-4B": { + "attachment": false, + "cost": { + "input": 0.01, + "output": 0 + }, + "id": "Qwen/Qwen3-Embedding-4B", + "knowledge": "2024-12", + "last_updated": "2025-01-01", + "limit": { + "context": 32000, + "output": 2048 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen 3 Embedding 4B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-01-01", + "temperature": false, + "tool_call": false + }, + "Qwen/Qwen3-Embedding-8B": { + "attachment": false, + "cost": { + "input": 0.01, + "output": 0 + }, + "id": "Qwen/Qwen3-Embedding-8B", + "knowledge": "2024-12", + "last_updated": "2025-01-01", + "limit": { + "context": 32000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen 3 Embedding 4B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-01-01", + "temperature": false, + "tool_call": false + }, + "Qwen/Qwen3-Next-80B-A3B-Instruct": { + "attachment": false, + "cost": { + "input": 0.25, + "output": 1 + }, + "id": "Qwen/Qwen3-Next-80B-A3B-Instruct", + "knowledge": "2025-04", + "last_updated": "2025-09-11", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3-Next-80B-A3B-Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-11", + "temperature": true, + "tool_call": true + }, + "Qwen/Qwen3-Next-80B-A3B-Thinking": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 2 + }, + "id": "Qwen/Qwen3-Next-80B-A3B-Thinking", + "knowledge": "2025-04", + "last_updated": "2025-09-11", + "limit": { + "context": 262144, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3-Next-80B-A3B-Thinking", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-11", + "temperature": true, + "tool_call": true + }, + "deepseek-ai/DeepSeek-R1-0528": { + "attachment": false, + "cost": { + "input": 3, + "output": 5 + }, + "id": "deepseek-ai/DeepSeek-R1-0528", + "knowledge": "2025-05", + "last_updated": "2025-05-28", + "limit": { + "context": 163840, + "output": 163840 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek-R1-0528", + "open_weights": true, + "reasoning": true, + "release_date": "2025-05-28", + "temperature": true, + "tool_call": true + }, + 
"deepseek-ai/Deepseek-V3-0324": { + "attachment": false, + "cost": { + "input": 1.25, + "output": 1.25 + }, + "id": "deepseek-ai/Deepseek-V3-0324", + "knowledge": "2024-10", + "last_updated": "2025-03-24", + "limit": { + "context": 16384, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek-V3-0324", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-24", + "temperature": true, + "tool_call": true + }, + "moonshotai/Kimi-K2-Instruct": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "moonshotai/Kimi-K2-Instruct", + "knowledge": "2024-10", + "last_updated": "2025-07-14", + "limit": { + "context": 131072, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi-K2-Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-14", + "temperature": true, + "tool_call": true + }, + "moonshotai/Kimi-K2-Instruct-0905": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "moonshotai/Kimi-K2-Instruct-0905", + "knowledge": "2024-10", + "last_updated": "2025-09-04", + "limit": { + "context": 262144, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi-K2-Instruct-0905", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-04", + "temperature": true, + "tool_call": true + }, + "zai-org/GLM-4.5": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.2 + }, + "id": "zai-org/GLM-4.5", + "knowledge": "2025-04", + "last_updated": "2025-07-28", + "limit": { + "context": 131072, + "output": 98304 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM-4.5", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-28", + "temperature": true, + "tool_call": true + }, + "zai-org/GLM-4.5-Air": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 1.1 + }, + "id": "zai-org/GLM-4.5-Air", + "knowledge": "2025-04", + "last_updated": "2025-07-28", + "limit": { + "context": 128000, + "output": 96000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM-4.5-Air", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-28", + "temperature": true, + "tool_call": true + }, + "zai-org/GLM-4.6": { + "attachment": false, + "cost": { + "cache_read": 0.11, + "input": 0.6, + "output": 2.2 + }, + "id": "zai-org/GLM-4.6", + "knowledge": "2025-04", + "last_updated": "2025-09-30", + "limit": { + "context": 200000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM-4.6", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-30", + "temperature": true, + "tool_call": true + } + }, + "name": "Hugging Face", + "npm": "@ai-sdk/openai-compatible" + }, + "lmstudio": { + "api": "http://127.0.0.1:1234/v1", + "doc": "https://lmstudio.ai/models", + "env": [ + "LMSTUDIO_API_KEY" + ], + "id": "lmstudio", + "models": { + "openai/gpt-oss-20b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openai/gpt-oss-20b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 20B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, 
+ "tool_call": true + }, + "qwen/qwen3-30b-a3b-2507": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-30b-a3b-2507", + "knowledge": "2025-04", + "last_updated": "2025-07-30", + "limit": { + "context": 262144, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 30B A3B 2507", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-30", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-coder-30b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-coder-30b", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 65536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 30B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + } + }, + "name": "LMStudio", + "npm": "@ai-sdk/openai-compatible" + }, + "mistral": { + "doc": "https://docs.mistral.ai/getting-started/models/", + "env": [ + "MISTRAL_API_KEY" + ], + "id": "mistral", + "models": { + "codestral-latest": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 0.9 + }, + "id": "codestral-latest", + "knowledge": "2024-10", + "last_updated": "2025-01-04", + "limit": { + "context": 256000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Codestral", + "open_weights": true, + "reasoning": false, + "release_date": "2024-05-29", + "temperature": true, + "tool_call": true + }, + "devstral-medium-2507": { + "attachment": false, + "cost": { + "input": 0.4, + "output": 2 + }, + "id": "devstral-medium-2507", + "knowledge": "2025-05", + "last_updated": "2025-07-10", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Devstral Medium", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-10", + "temperature": true, + "tool_call": true + }, + "devstral-small-2505": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0.3 + }, + "id": "devstral-small-2505", + "knowledge": "2025-05", + "last_updated": "2025-05-07", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Devstral Small 2505", + "open_weights": true, + "reasoning": false, + "release_date": "2025-05-07", + "temperature": true, + "tool_call": true + }, + "devstral-small-2507": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0.3 + }, + "id": "devstral-small-2507", + "knowledge": "2025-05", + "last_updated": "2025-07-10", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Devstral Small", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-10", + "temperature": true, + "tool_call": true + }, + "magistral-medium-latest": { + "attachment": false, + "cost": { + "input": 2, + "output": 5 + }, + "id": "magistral-medium-latest", + "knowledge": "2025-06", + "last_updated": "2025-03-20", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Magistral Medium", + "open_weights": true, + "reasoning": true, + "release_date": "2025-03-17", + "temperature": true, + 
"tool_call": true + }, + "magistral-small": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 1.5 + }, + "id": "magistral-small", + "knowledge": "2025-06", + "last_updated": "2025-03-17", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Magistral Small", + "open_weights": true, + "reasoning": true, + "release_date": "2025-03-17", + "temperature": true, + "tool_call": true + }, + "ministral-3b-latest": { + "attachment": false, + "cost": { + "input": 0.04, + "output": 0.04 + }, + "id": "ministral-3b-latest", + "knowledge": "2024-10", + "last_updated": "2024-10-04", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Ministral 3B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-10-01", + "temperature": true, + "tool_call": true + }, + "ministral-8b-latest": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0.1 + }, + "id": "ministral-8b-latest", + "knowledge": "2024-10", + "last_updated": "2024-10-04", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Ministral 8B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-10-01", + "temperature": true, + "tool_call": true + }, + "mistral-large-latest": { + "attachment": false, + "cost": { + "input": 2, + "output": 6 + }, + "id": "mistral-large-latest", + "knowledge": "2024-11", + "last_updated": "2024-11-04", + "limit": { + "context": 131072, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Large", + "open_weights": true, + "reasoning": false, + "release_date": "2024-11-01", + "temperature": true, + "tool_call": true + }, + "mistral-medium-2505": { + "attachment": true, + "cost": { + "input": 0.4, + "output": 2 + }, + "id": "mistral-medium-2505", + "knowledge": "2025-05", + "last_updated": "2025-05-07", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Medium 3", + "open_weights": false, + "reasoning": false, + "release_date": "2025-05-07", + "temperature": true, + "tool_call": true + }, + "mistral-medium-2508": { + "attachment": true, + "cost": { + "input": 0.4, + "output": 2 + }, + "id": "mistral-medium-2508", + "knowledge": "2025-05", + "last_updated": "2025-08-12", + "limit": { + "context": 262144, + "output": 262144 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Medium 3.1", + "open_weights": false, + "reasoning": false, + "release_date": "2025-08-12", + "temperature": true, + "tool_call": true + }, + "mistral-medium-latest": { + "attachment": false, + "cost": { + "input": 0.4, + "output": 2 + }, + "id": "mistral-medium-latest", + "knowledge": "2025-05", + "last_updated": "2025-05-10", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Medium", + "open_weights": true, + "reasoning": false, + "release_date": "2025-05-07", + "temperature": true, + "tool_call": true + }, + "mistral-nemo": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.15 + }, + "id": "mistral-nemo", + "knowledge": "2024-07", + "last_updated": 
"2024-07-01", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Nemo", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-01", + "temperature": true, + "tool_call": true + }, + "mistral-small-latest": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0.3 + }, + "id": "mistral-small-latest", + "knowledge": "2025-03", + "last_updated": "2024-09-04", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Small", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-01", + "temperature": true, + "tool_call": true + }, + "open-mistral-7b": { + "attachment": false, + "cost": { + "input": 0.25, + "output": 0.25 + }, + "id": "open-mistral-7b", + "knowledge": "2023-12", + "last_updated": "2023-09-27", + "limit": { + "context": 8000, + "output": 8000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mistral 7B", + "open_weights": true, + "reasoning": false, + "release_date": "2023-09-27", + "temperature": true, + "tool_call": true + }, + "open-mixtral-8x22b": { + "attachment": false, + "cost": { + "input": 2, + "output": 6 + }, + "id": "open-mixtral-8x22b", + "knowledge": "2024-04", + "last_updated": "2024-04-17", + "limit": { + "context": 64000, + "output": 64000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mixtral 8x22B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-04-17", + "temperature": true, + "tool_call": true + }, + "open-mixtral-8x7b": { + "attachment": false, + "cost": { + "input": 0.7, + "output": 0.7 + }, + "id": "open-mixtral-8x7b", + "knowledge": "2024-01", + "last_updated": "2023-12-11", + "limit": { + "context": 32000, + "output": 32000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mixtral 8x7B", + "open_weights": true, + "reasoning": false, + "release_date": "2023-12-11", + "temperature": true, + "tool_call": true + }, + "pixtral-12b": { + "attachment": true, + "cost": { + "input": 0.15, + "output": 0.15 + }, + "id": "pixtral-12b", + "knowledge": "2024-09", + "last_updated": "2024-09-01", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Pixtral 12B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-01", + "temperature": true, + "tool_call": true + }, + "pixtral-large-latest": { + "attachment": true, + "cost": { + "input": 2, + "output": 6 + }, + "id": "pixtral-large-latest", + "knowledge": "2024-11", + "last_updated": "2024-11-04", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Pixtral Large", + "open_weights": true, + "reasoning": false, + "release_date": "2024-11-01", + "temperature": true, + "tool_call": true + } + }, + "name": "Mistral", + "npm": "@ai-sdk/mistral" + }, + "nebius": { + "api": "https://api.studio.nebius.com/v1/", + "doc": "https://docs.studio.nebius.com/quickstart", + "env": [ + "NEBIUS_API_KEY" + ], + "id": "nebius", + "models": { + "NousResearch/hermes-4-405b": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "NousResearch/hermes-4-405b", + "knowledge": "2024-07", + 
"last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Hermes-4 405B", + "open_weights": false, + "reasoning": true, + "release_date": "2024-08-01", + "temperature": true, + "tool_call": true + }, + "NousResearch/hermes-4-70b": { + "attachment": false, + "cost": { + "input": 0.13, + "output": 0.4 + }, + "id": "NousResearch/hermes-4-70b", + "knowledge": "2024-07", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Hermes 4 70B", + "open_weights": false, + "reasoning": true, + "release_date": "2024-08-01", + "temperature": true, + "tool_call": true + }, + "deepseek-ai/deepseek-v3": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 1.5 + }, + "id": "deepseek-ai/deepseek-v3", + "knowledge": "2024-04", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3", + "open_weights": false, + "reasoning": true, + "release_date": "2024-05-07", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-3.3-70b-instruct-base": { + "attachment": false, + "cost": { + "input": 0.13, + "output": 0.4 + }, + "id": "meta-llama/llama-3.3-70b-instruct-base", + "knowledge": "2024-08", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama-3.3-70B-Instruct (Base)", + "open_weights": false, + "reasoning": true, + "release_date": "2024-08-22", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-3.3-70b-instruct-fast": { + "attachment": false, + "cost": { + "input": 0.25, + "output": 0.75 + }, + "id": "meta-llama/llama-3.3-70b-instruct-fast", + "knowledge": "2024-08", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama-3.3-70B-Instruct (Fast)", + "open_weights": false, + "reasoning": true, + "release_date": "2024-08-22", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-3_1-405b-instruct": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "meta-llama/llama-3_1-405b-instruct", + "knowledge": "2024-03", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.1 405B Instruct", + "open_weights": false, + "reasoning": true, + "release_date": "2024-07-23", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2-instruct": { + "attachment": false, + "cost": { + "input": 0.5, + "output": 2.4 + }, + "id": "moonshotai/kimi-k2-instruct", + "knowledge": "2024-01", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct", + "open_weights": false, + "reasoning": true, + "release_date": "2025-01-01", + "temperature": true, + "tool_call": true + }, + "nvidia/llama-3_1-nemotron-ultra-253b-v1": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 1.8 + }, + "id": "nvidia/llama-3_1-nemotron-ultra-253b-v1", + "knowledge": "2024-07", + 
"last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.1 Nemotron Ultra 253B v1", + "open_weights": false, + "reasoning": true, + "release_date": "2024-07-01", + "temperature": true, + "tool_call": true + }, + "openai/gpt-oss-120b": { + "attachment": true, + "cost": { + "input": 0.15, + "output": 0.6 + }, + "id": "openai/gpt-oss-120b", + "knowledge": "2024-01", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 120B", + "open_weights": false, + "reasoning": true, + "release_date": "2024-01-01", + "temperature": true, + "tool_call": true + }, + "openai/gpt-oss-20b": { + "attachment": true, + "cost": { + "input": 0.05, + "output": 0.2 + }, + "id": "openai/gpt-oss-20b", + "knowledge": "2024-01", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 20B", + "open_weights": false, + "reasoning": true, + "release_date": "2024-01-01", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-235b-a22b-instruct-2507": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.6 + }, + "id": "qwen/qwen3-235b-a22b-instruct-2507", + "knowledge": "2025-07", + "last_updated": "2025-10-04", + "limit": { + "context": 262144, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 235B A22B Instruct 2507", + "open_weights": false, + "reasoning": true, + "release_date": "2025-07-25", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-235b-a22b-thinking-2507": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.8 + }, + "id": "qwen/qwen3-235b-a22b-thinking-2507", + "knowledge": "2025-07", + "last_updated": "2025-10-04", + "limit": { + "context": 262144, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 235B A22B Thinking 2507", + "open_weights": false, + "reasoning": true, + "release_date": "2025-07-25", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-coder-480b-a35b-instruct": { + "attachment": false, + "cost": { + "input": 0.4, + "output": 1.8 + }, + "id": "qwen/qwen3-coder-480b-a35b-instruct", + "knowledge": "2025-04", + "last_updated": "2025-10-04", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct", + "open_weights": false, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "zai-org/glm-4.5": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.2 + }, + "id": "zai-org/glm-4.5", + "knowledge": "2024-05", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2024-06-01", + "temperature": true, + "tool_call": true + }, + "zai-org/glm-4.5-air": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 1.2 + }, + "id": "zai-org/glm-4.5-air", + "knowledge": "2024-05", + "last_updated": "2025-10-04", + "limit": { + "context": 131072, + "output": 8192 + }, + 
"modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5 Air", + "open_weights": false, + "reasoning": true, + "release_date": "2024-06-01", + "temperature": true, + "tool_call": true + } + }, + "name": "Nebius AI Studio", + "npm": "@ai-sdk/openai-compatible" + }, + "nvidia": { + "api": "https://integrate.api.nvidia.com/v1", + "doc": "https://docs.api.nvidia.com/nim/", + "env": [ + "NVIDIA_API_KEY" + ], + "id": "nvidia", + "models": { + "black-forest-labs/flux.1-dev": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "black-forest-labs/flux.1-dev", + "knowledge": "2024-08", + "last_updated": "2025-09-05", + "limit": { + "context": 4096, + "output": 0 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "image" + ] + }, + "name": "FLUX.1-dev", + "open_weights": false, + "reasoning": false, + "release_date": "2024-08-01", + "temperature": true, + "tool_call": false + }, + "deepseek-ai/deepseek-v3.1": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek-ai/deepseek-v3.1", + "knowledge": "2024-07", + "last_updated": "2025-08-26", + "limit": { + "context": 128000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3.1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-20", + "temperature": true, + "tool_call": true + }, + "deepseek-ai/deepseek-v3.1-terminus": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek-ai/deepseek-v3.1-terminus", + "knowledge": "2025-01", + "last_updated": "2025-09-22", + "limit": { + "context": 128000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3.1 Terminus", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-22", + "temperature": true, + "tool_call": true + }, + "google/gemma-3-27b-it": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "google/gemma-3-27b-it", + "knowledge": "2024-12", + "last_updated": "2025-09-05", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Gemma-3-27B-IT", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-01", + "temperature": true, + "tool_call": true + }, + "microsoft/phi-4-mini-instruct": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "microsoft/phi-4-mini-instruct", + "knowledge": "2024-12", + "last_updated": "2025-09-05", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio" + ], + "output": [ + "text" + ] + }, + "name": "Phi-4-Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-01", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2-instruct": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "moonshotai/kimi-k2-instruct", + "knowledge": "2024-01", + "last_updated": "2025-09-05", + "limit": { + "context": 128000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct", + "open_weights": false, + "reasoning": true, + "release_date": "2025-01-01", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2-instruct-0905": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": 
"moonshotai/kimi-k2-instruct-0905", + "knowledge": "2024-10", + "last_updated": "2025-09-05", + "limit": { + "context": 262144, + "output": 262144 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 0905", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-05", + "temperature": true, + "tool_call": true + }, + "nvidia/cosmos-nemotron-34b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "nvidia/cosmos-nemotron-34b", + "knowledge": "2024-01", + "last_updated": "2025-09-05", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "video" + ], + "output": [ + "text" + ] + }, + "name": "Cosmos Nemotron 34B", + "open_weights": false, + "reasoning": true, + "release_date": "2024-01-01", + "temperature": true, + "tool_call": false + }, + "nvidia/llama-3.1-nemotron-ultra-253b-v1": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "nvidia/llama-3.1-nemotron-ultra-253b-v1", + "knowledge": "2024-07", + "last_updated": "2025-09-05", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama-3.1-Nemotron-Ultra-253B-v1", + "open_weights": false, + "reasoning": true, + "release_date": "2024-07-01", + "temperature": true, + "tool_call": true + }, + "nvidia/llama-embed-nemotron-8b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "nvidia/llama-embed-nemotron-8b", + "knowledge": "2025-03", + "last_updated": "2025-03-18", + "limit": { + "context": 32768, + "output": 2048 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama Embed Nemotron 8B", + "open_weights": false, + "reasoning": false, + "release_date": "2025-03-18", + "temperature": false, + "tool_call": false + }, + "nvidia/nemoretriever-ocr-v1": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "nvidia/nemoretriever-ocr-v1", + "knowledge": "2024-01", + "last_updated": "2025-09-05", + "limit": { + "context": 0, + "output": 4096 + }, + "modalities": { + "input": [ + "image" + ], + "output": [ + "text" + ] + }, + "name": "NeMo Retriever OCR v1", + "open_weights": false, + "reasoning": false, + "release_date": "2024-01-01", + "temperature": false, + "tool_call": false + }, + "nvidia/parakeet-tdt-0.6b-v2": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "nvidia/parakeet-tdt-0.6b-v2", + "knowledge": "2024-01", + "last_updated": "2025-09-05", + "limit": { + "context": 0, + "output": 4096 + }, + "modalities": { + "input": [ + "audio" + ], + "output": [ + "text" + ] + }, + "name": "Parakeet TDT 0.6B v2", + "open_weights": false, + "reasoning": false, + "release_date": "2024-01-01", + "temperature": false, + "tool_call": false + }, + "openai/gpt-oss-120b": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openai/gpt-oss-120b", + "knowledge": "2025-08", + "last_updated": "2025-08-14", + "limit": { + "context": 128000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-OSS-120B", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-04", + "temperature": true, + "tool_call": false + }, + "openai/whisper-large-v3": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openai/whisper-large-v3", + "knowledge": "2023-09", + 
"last_updated": "2025-09-05", + "limit": { + "context": 0, + "output": 4096 + }, + "modalities": { + "input": [ + "audio" + ], + "output": [ + "text" + ] + }, + "name": "Whisper Large v3", + "open_weights": true, + "reasoning": false, + "release_date": "2023-09-01", + "temperature": false, + "tool_call": false + }, + "qwen/qwen3-235b-a22b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-235b-a22b", + "knowledge": "2024-12", + "last_updated": "2025-09-05", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3-235B-A22B", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-01", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-coder-480b-a35b-instruct": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-coder-480b-a35b-instruct", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct", + "open_weights": false, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + } + }, + "name": "Nvidia", + "npm": "@ai-sdk/openai-compatible" + }, + "openai": { + "doc": "https://platform.openai.com/docs/models", + "env": [ + "OPENAI_API_KEY" + ], + "id": "openai", + "models": { + "codex-mini-latest": { + "attachment": true, + "cost": { + "cache_read": 0.375, + "input": 1.5, + "output": 6 + }, + "id": "codex-mini-latest", + "knowledge": "2024-04", + "last_updated": "2025-05-16", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Codex Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-16", + "temperature": false, + "tool_call": true + }, + "gpt-3.5-turbo": { + "attachment": false, + "cost": { + "cache_read": 1.25, + "input": 0.5, + "output": 1.5 + }, + "id": "gpt-3.5-turbo", + "knowledge": "2021-09-01", + "last_updated": "2023-11-06", + "limit": { + "context": 16385, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-3.5-turbo", + "open_weights": false, + "reasoning": false, + "release_date": "2023-03-01", + "temperature": true, + "tool_call": false + }, + "gpt-4": { + "attachment": true, + "cost": { + "input": 30, + "output": 60 + }, + "id": "gpt-4", + "knowledge": "2023-11", + "last_updated": "2024-04-09", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4", + "open_weights": false, + "reasoning": false, + "release_date": "2023-11-06", + "temperature": true, + "tool_call": true + }, + "gpt-4-turbo": { + "attachment": true, + "cost": { + "input": 10, + "output": 30 + }, + "id": "gpt-4-turbo", + "knowledge": "2023-12", + "last_updated": "2024-04-09", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4 Turbo", + "open_weights": false, + "reasoning": false, + "release_date": "2023-11-06", + "temperature": true, + "tool_call": true + }, + "gpt-4.1": { + "attachment": true, + "cost": { + "cache_read": 0.5, + "input": 2, + "output": 8 + }, + "id": "gpt-4.1", + "knowledge": "2024-04", + "last_updated": 
"2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4.1", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "gpt-4.1-mini": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "input": 0.4, + "output": 1.6 + }, + "id": "gpt-4.1-mini", + "knowledge": "2024-04", + "last_updated": "2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4.1 mini", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "gpt-4.1-nano": { + "attachment": true, + "cost": { + "cache_read": 0.03, + "input": 0.1, + "output": 0.4 + }, + "id": "gpt-4.1-nano", + "knowledge": "2024-04", + "last_updated": "2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4.1 nano", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "gpt-4o": { + "attachment": true, + "cost": { + "cache_read": 1.25, + "input": 2.5, + "output": 10 + }, + "id": "gpt-4o", + "knowledge": "2023-09", + "last_updated": "2024-08-06", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o", + "open_weights": false, + "reasoning": false, + "release_date": "2024-05-13", + "temperature": true, + "tool_call": true + }, + "gpt-4o-2024-05-13": { + "attachment": true, + "cost": { + "input": 5, + "output": 15 + }, + "id": "gpt-4o-2024-05-13", + "knowledge": "2023-09", + "last_updated": "2024-05-13", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o (2024-05-13)", + "open_weights": false, + "reasoning": false, + "release_date": "2024-05-13", + "temperature": true, + "tool_call": true + }, + "gpt-4o-2024-08-06": { + "attachment": true, + "cost": { + "cache_read": 1.25, + "input": 2.5, + "output": 10 + }, + "id": "gpt-4o-2024-08-06", + "knowledge": "2023-09", + "last_updated": "2024-08-06", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o (2024-08-06)", + "open_weights": false, + "reasoning": false, + "release_date": "2024-08-06", + "temperature": true, + "tool_call": true + }, + "gpt-4o-2024-11-20": { + "attachment": true, + "cost": { + "cache_read": 1.25, + "input": 2.5, + "output": 10 + }, + "id": "gpt-4o-2024-11-20", + "knowledge": "2023-09", + "last_updated": "2024-11-20", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o (2024-11-20)", + "open_weights": false, + "reasoning": false, + "release_date": "2024-11-20", + "temperature": true, + "tool_call": true + }, + "gpt-4o-mini": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "input": 0.15, + "output": 0.6 + }, + "id": "gpt-4o-mini", + "knowledge": "2023-09", + "last_updated": "2024-07-18", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + 
"text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o mini", + "open_weights": false, + "reasoning": false, + "release_date": "2024-07-18", + "temperature": true, + "tool_call": true + }, + "gpt-5": { + "attachment": true, + "cost": { + "cache_read": 0.13, + "input": 1.25, + "output": 10 + }, + "id": "gpt-5", + "knowledge": "2024-09-30", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": false, + "tool_call": true + }, + "gpt-5-chat-latest": { + "attachment": true, + "cost": { + "input": 1.25, + "output": 10 + }, + "id": "gpt-5-chat-latest", + "knowledge": "2024-09-30", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Chat (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": true, + "tool_call": false + }, + "gpt-5-codex": { + "attachment": false, + "cost": { + "cache_read": 0.125, + "input": 1.25, + "output": 10 + }, + "id": "gpt-5-codex", + "knowledge": "2024-09-30", + "last_updated": "2025-09-15", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5-Codex", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-15", + "temperature": false, + "tool_call": true + }, + "gpt-5-mini": { + "attachment": true, + "cost": { + "cache_read": 0.03, + "input": 0.25, + "output": 2 + }, + "id": "gpt-5-mini", + "knowledge": "2024-05-30", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": false, + "tool_call": true + }, + "gpt-5-nano": { + "attachment": true, + "cost": { + "cache_read": 0.01, + "input": 0.05, + "output": 0.4 + }, + "id": "gpt-5-nano", + "knowledge": "2024-05-30", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Nano", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": false, + "tool_call": true + }, + "gpt-5-pro": { + "attachment": true, + "cost": { + "input": 15, + "output": 120 + }, + "id": "gpt-5-pro", + "knowledge": "2024-09-30", + "last_updated": "2025-10-06", + "limit": { + "context": 400000, + "output": 272000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Pro", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-06", + "temperature": false, + "tool_call": true + }, + "o1": { + "attachment": true, + "cost": { + "cache_read": 7.5, + "input": 15, + "output": 60 + }, + "id": "o1", + "knowledge": "2023-09", + "last_updated": "2024-12-05", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o1", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-05", + "temperature": false, 
+ "tool_call": true + }, + "o1-mini": { + "attachment": false, + "cost": { + "cache_read": 0.55, + "input": 1.1, + "output": 4.4 + }, + "id": "o1-mini", + "knowledge": "2023-09", + "last_updated": "2024-09-12", + "limit": { + "context": 128000, + "output": 65536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "o1-mini", + "open_weights": false, + "reasoning": true, + "release_date": "2024-09-12", + "temperature": false, + "tool_call": false + }, + "o1-preview": { + "attachment": false, + "cost": { + "cache_read": 7.5, + "input": 15, + "output": 60 + }, + "id": "o1-preview", + "knowledge": "2023-09", + "last_updated": "2024-09-12", + "limit": { + "context": 128000, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "o1-preview", + "open_weights": false, + "reasoning": true, + "release_date": "2024-09-12", + "temperature": true, + "tool_call": false + }, + "o1-pro": { + "attachment": true, + "cost": { + "input": 150, + "output": 600 + }, + "id": "o1-pro", + "knowledge": "2023-09", + "last_updated": "2025-03-19", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o1-pro", + "open_weights": false, + "reasoning": true, + "release_date": "2025-03-19", + "temperature": false, + "tool_call": true + }, + "o3": { + "attachment": true, + "cost": { + "cache_read": 0.5, + "input": 2, + "output": 8 + }, + "id": "o3", + "knowledge": "2024-05", + "last_updated": "2025-04-16", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o3", + "open_weights": false, + "reasoning": true, + "release_date": "2025-04-16", + "temperature": false, + "tool_call": true + }, + "o3-deep-research": { + "attachment": true, + "cost": { + "cache_read": 2.5, + "input": 10, + "output": 40 + }, + "id": "o3-deep-research", + "knowledge": "2024-05", + "last_updated": "2024-06-26", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o3-deep-research", + "open_weights": false, + "reasoning": true, + "release_date": "2024-06-26", + "temperature": false, + "tool_call": true + }, + "o3-mini": { + "attachment": false, + "cost": { + "cache_read": 0.55, + "input": 1.1, + "output": 4.4 + }, + "id": "o3-mini", + "knowledge": "2024-05", + "last_updated": "2025-01-29", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "o3-mini", + "open_weights": false, + "reasoning": true, + "release_date": "2024-12-20", + "temperature": false, + "tool_call": true + }, + "o3-pro": { + "attachment": true, + "cost": { + "input": 20, + "output": 80 + }, + "id": "o3-pro", + "knowledge": "2024-05", + "last_updated": "2025-06-10", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o3-pro", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-10", + "temperature": false, + "tool_call": true + }, + "o4-mini": { + "attachment": true, + "cost": { + "cache_read": 0.28, + "input": 1.1, + "output": 4.4 + }, + "id": "o4-mini", + "knowledge": "2024-05", + "last_updated": "2025-04-16", + "limit": { + "context": 200000, + "output": 100000 + }, + 
"modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o4-mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-04-16", + "temperature": false, + "tool_call": true + }, + "o4-mini-deep-research": { + "attachment": true, + "cost": { + "cache_read": 0.5, + "input": 2, + "output": 8 + }, + "id": "o4-mini-deep-research", + "knowledge": "2024-05", + "last_updated": "2024-06-26", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o4-mini-deep-research", + "open_weights": false, + "reasoning": true, + "release_date": "2024-06-26", + "temperature": false, + "tool_call": true + }, + "text-embedding-3-large": { + "attachment": false, + "cost": { + "input": 0.13, + "output": 0 + }, + "id": "text-embedding-3-large", + "knowledge": "2024-01", + "last_updated": "2024-01-25", + "limit": { + "context": 8191, + "output": 3072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "text-embedding-3-large", + "open_weights": false, + "reasoning": false, + "release_date": "2024-01-25", + "temperature": false, + "tool_call": false + }, + "text-embedding-3-small": { + "attachment": false, + "cost": { + "input": 0.02, + "output": 0 + }, + "id": "text-embedding-3-small", + "knowledge": "2024-01", + "last_updated": "2024-01-25", + "limit": { + "context": 8191, + "output": 1536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "text-embedding-3-small", + "open_weights": false, + "reasoning": false, + "release_date": "2024-01-25", + "temperature": false, + "tool_call": false + }, + "text-embedding-ada-002": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0 + }, + "id": "text-embedding-ada-002", + "knowledge": "2022-12", + "last_updated": "2022-12-15", + "limit": { + "context": 8192, + "output": 1536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "text-embedding-ada-002", + "open_weights": false, + "reasoning": false, + "release_date": "2022-12-15", + "temperature": false, + "tool_call": false + } + }, + "name": "OpenAI", + "npm": "@ai-sdk/openai" + }, + "openrouter": { + "api": "https://openrouter.ai/api/v1", + "doc": "https://openrouter.ai/models", + "env": [ + "OPENROUTER_API_KEY" + ], + "id": "openrouter", + "models": { + "anthropic/claude-3.5-haiku": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "cache_write": 1, + "input": 0.8, + "output": 4 + }, + "id": "anthropic/claude-3.5-haiku", + "knowledge": "2024-07-31", + "last_updated": "2024-10-22", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 3.5", + "open_weights": false, + "reasoning": false, + "release_date": "2024-10-22", + "temperature": true, + "tool_call": true + }, + "anthropic/claude-3.7-sonnet": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "anthropic/claude-3.7-sonnet", + "knowledge": "2024-01", + "last_updated": "2025-02-19", + "limit": { + "context": 200000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 3.7", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-19", + "temperature": true, + "tool_call": true + }, + 
"anthropic/claude-haiku-4.5": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "cache_write": 1.25, + "input": 1, + "output": 5 + }, + "id": "anthropic/claude-haiku-4.5", + "knowledge": "2025-02-31", + "last_updated": "2025-10-15", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Haiku 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-15", + "temperature": true, + "tool_call": true + }, + "anthropic/claude-opus-4": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "anthropic/claude-opus-4", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "anthropic/claude-opus-4.1": { + "attachment": true, + "cost": { + "cache_read": 1.5, + "cache_write": 18.75, + "input": 15, + "output": 75 + }, + "id": "anthropic/claude-opus-4.1", + "knowledge": "2025-03-31", + "last_updated": "2025-08-05", + "limit": { + "context": 200000, + "output": 32000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Opus 4.1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "anthropic/claude-sonnet-4": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "anthropic/claude-sonnet-4", + "knowledge": "2025-03-31", + "last_updated": "2025-05-22", + "limit": { + "context": 200000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-22", + "temperature": true, + "tool_call": true + }, + "anthropic/claude-sonnet-4.5": { + "attachment": true, + "cost": { + "cache_read": 0.3, + "cache_write": 3.75, + "input": 3, + "output": 15 + }, + "id": "anthropic/claude-sonnet-4.5", + "knowledge": "2025-07-31", + "last_updated": "2025-09-29", + "limit": { + "context": 1000000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Claude Sonnet 4.5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-29", + "temperature": true, + "tool_call": true + }, + "cognitivecomputations/dolphin3.0-mistral-24b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "cognitivecomputations/dolphin3.0-mistral-24b", + "knowledge": "2024-10", + "last_updated": "2025-02-13", + "limit": { + "context": 32768, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Dolphin3.0 Mistral 24B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-02-13", + "temperature": true, + "tool_call": true + }, + "cognitivecomputations/dolphin3.0-r1-mistral-24b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "cognitivecomputations/dolphin3.0-r1-mistral-24b", + "knowledge": "2024-10", + "last_updated": "2025-02-13", + "limit": { + "context": 32768, + "output": 8192 + }, + 
"modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Dolphin3.0 R1 Mistral 24B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-02-13", + "temperature": true, + "tool_call": true + }, + "deepseek/deepseek-chat-v3-0324": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek/deepseek-chat-v3-0324", + "knowledge": "2024-10", + "last_updated": "2025-03-24", + "limit": { + "context": 16384, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3 0324", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-24", + "temperature": true, + "tool_call": false + }, + "deepseek/deepseek-chat-v3.1": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.8 + }, + "id": "deepseek/deepseek-chat-v3.1", + "knowledge": "2025-07", + "last_updated": "2025-08-21", + "limit": { + "context": 163840, + "output": 163840 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek-V3.1", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-21", + "temperature": true, + "tool_call": true + }, + "deepseek/deepseek-r1-0528-qwen3-8b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek/deepseek-r1-0528-qwen3-8b:free", + "knowledge": "2025-05", + "last_updated": "2025-05-29", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Deepseek R1 0528 Qwen3 8B (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-05-29", + "temperature": true, + "tool_call": true + }, + "deepseek/deepseek-r1-0528:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek/deepseek-r1-0528:free", + "knowledge": "2025-05", + "last_updated": "2025-05-28", + "limit": { + "context": 163840, + "output": 163840 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "R1 0528 (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-05-28", + "temperature": true, + "tool_call": true + }, + "deepseek/deepseek-r1-distill-llama-70b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek/deepseek-r1-distill-llama-70b", + "knowledge": "2024-10", + "last_updated": "2025-01-23", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek R1 Distill Llama 70B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-01-23", + "temperature": true, + "tool_call": false + }, + "deepseek/deepseek-r1-distill-qwen-14b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek/deepseek-r1-distill-qwen-14b", + "knowledge": "2024-10", + "last_updated": "2025-01-29", + "limit": { + "context": 64000, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek R1 Distill Qwen 14B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-01-29", + "temperature": true, + "tool_call": false + }, + "deepseek/deepseek-r1:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek/deepseek-r1:free", + "knowledge": "2025-01", + "last_updated": "2025-01-20", + "limit": { + "context": 163840, + "output": 163840 + }, + 
"modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "R1 (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-01-20", + "temperature": true, + "tool_call": true + }, + "deepseek/deepseek-v3-base:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "deepseek/deepseek-v3-base:free", + "knowledge": "2025-03", + "last_updated": "2025-03-29", + "limit": { + "context": 163840, + "output": 163840 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3 Base (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-29", + "temperature": true, + "tool_call": false + }, + "deepseek/deepseek-v3.1-terminus": { + "attachment": false, + "cost": { + "input": 0.27, + "output": 1 + }, + "id": "deepseek/deepseek-v3.1-terminus", + "knowledge": "2025-07", + "last_updated": "2025-09-22", + "limit": { + "context": 131072, + "output": 65536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3.1 Terminus", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-22", + "temperature": true, + "tool_call": true + }, + "deepseek/deepseek-v3.1-terminus:exacto": { + "attachment": false, + "cost": { + "input": 0.27, + "output": 1 + }, + "id": "deepseek/deepseek-v3.1-terminus:exacto", + "knowledge": "2025-07", + "last_updated": "2025-09-22", + "limit": { + "context": 131072, + "output": 65536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3.1 Terminus (exacto)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-22", + "temperature": true, + "tool_call": true + }, + "featherless/qwerky-72b": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "featherless/qwerky-72b", + "knowledge": "2024-10", + "last_updated": "2025-03-20", + "limit": { + "context": 32768, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwerky 72B", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-20", + "temperature": true, + "tool_call": false + }, + "google/gemini-2.0-flash-001": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "google/gemini-2.0-flash-001", + "knowledge": "2024-06", + "last_updated": "2024-12-11", + "limit": { + "context": 1048576, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.0 Flash", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-11", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.0-flash-exp:free": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "google/gemini-2.0-flash-exp:free", + "knowledge": "2024-12", + "last_updated": "2024-12-11", + "limit": { + "context": 1048576, + "output": 1048576 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.0 Flash Experimental (free)", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-11", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.5-flash": { + "attachment": true, + "cost": { + "cache_read": 0.0375, + "input": 0.3, + "output": 2.5 + }, + "id": "google/gemini-2.5-flash", + "knowledge": "2025-01", + "last_updated": "2025-07-17", + 
"limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash", + "open_weights": false, + "reasoning": true, + "release_date": "2025-07-17", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.5-flash-lite": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "google/gemini-2.5-flash-lite", + "knowledge": "2025-01", + "last_updated": "2025-06-17", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.5-flash-lite-preview-09-2025": { + "attachment": true, + "cost": { + "cache_read": 0.025, + "input": 0.1, + "output": 0.4 + }, + "id": "google/gemini-2.5-flash-lite-preview-09-2025", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Lite Preview 09-25", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.5-flash-preview-09-2025": { + "attachment": true, + "cost": { + "cache_read": 0.031, + "input": 0.3, + "output": 2.5 + }, + "id": "google/gemini-2.5-flash-preview-09-2025", + "knowledge": "2025-01", + "last_updated": "2025-09-25", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Flash Preview 09-25", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-25", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.5-pro": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "google/gemini-2.5-pro", + "knowledge": "2025-01", + "last_updated": "2025-06-05", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro", + "open_weights": false, + "reasoning": true, + "release_date": "2025-03-20", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.5-pro-preview-05-06": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "google/gemini-2.5-pro-preview-05-06", + "knowledge": "2025-01", + "last_updated": "2025-05-06", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + "input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro Preview 05-06", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-06", + "temperature": true, + "tool_call": true + }, + "google/gemini-2.5-pro-preview-06-05": { + "attachment": true, + "cost": { + "cache_read": 0.31, + "input": 1.25, + "output": 10 + }, + "id": "google/gemini-2.5-pro-preview-06-05", + "knowledge": "2025-01", + "last_updated": "2025-06-05", + "limit": { + "context": 1048576, + "output": 65536 + }, + "modalities": { + 
"input": [ + "text", + "image", + "audio", + "video", + "pdf" + ], + "output": [ + "text" + ] + }, + "name": "Gemini 2.5 Pro Preview 06-05", + "open_weights": false, + "reasoning": true, + "release_date": "2025-06-05", + "temperature": true, + "tool_call": true + }, + "google/gemma-2-9b-it:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "google/gemma-2-9b-it:free", + "knowledge": "2024-06", + "last_updated": "2024-06-28", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Gemma 2 9B (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2024-06-28", + "temperature": true, + "tool_call": true + }, + "google/gemma-3-12b-it": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "google/gemma-3-12b-it", + "knowledge": "2024-10", + "last_updated": "2025-03-13", + "limit": { + "context": 96000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Gemma 3 12B IT", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-13", + "temperature": true, + "tool_call": true + }, + "google/gemma-3-27b-it": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "google/gemma-3-27b-it", + "knowledge": "2024-10", + "last_updated": "2025-03-12", + "limit": { + "context": 96000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Gemma 3 27B IT", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-12", + "temperature": true, + "tool_call": true + }, + "google/gemma-3n-e4b-it": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "google/gemma-3n-e4b-it", + "knowledge": "2024-10", + "last_updated": "2025-05-20", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio" + ], + "output": [ + "text" + ] + }, + "name": "Gemma 3n E4B IT", + "open_weights": true, + "reasoning": false, + "release_date": "2025-05-20", + "temperature": true, + "tool_call": false + }, + "google/gemma-3n-e4b-it:free": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "google/gemma-3n-e4b-it:free", + "knowledge": "2025-05", + "last_updated": "2025-05-20", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "audio" + ], + "output": [ + "text" + ] + }, + "name": "Gemma 3n 4B (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-05-20", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-3.2-11b-vision-instruct": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "meta-llama/llama-3.2-11b-vision-instruct", + "knowledge": "2023-12", + "last_updated": "2024-09-25", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.2 11B Vision Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-09-25", + "temperature": true, + "tool_call": false + }, + "meta-llama/llama-3.3-70b-instruct:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "meta-llama/llama-3.3-70b-instruct:free", + "knowledge": "2024-12", + "last_updated": "2024-12-06", + "limit": { + "context": 65536, + "output": 65536 + }, + 
"modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.3 70B Instruct (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2024-12-06", + "temperature": true, + "tool_call": true + }, + "meta-llama/llama-4-scout:free": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "meta-llama/llama-4-scout:free", + "knowledge": "2024-08", + "last_updated": "2025-04-05", + "limit": { + "context": 64000, + "output": 64000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Llama 4 Scout (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-05", + "temperature": true, + "tool_call": true + }, + "microsoft/mai-ds-r1:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "microsoft/mai-ds-r1:free", + "knowledge": "2025-04", + "last_updated": "2025-04-21", + "limit": { + "context": 163840, + "output": 163840 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "MAI DS R1 (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-21", + "temperature": true, + "tool_call": true + }, + "minimax/minimax-01": { + "attachment": true, + "cost": { + "input": 0.2, + "output": 1.1 + }, + "id": "minimax/minimax-01", + "last_updated": "2025-01-15", + "limit": { + "context": 1000000, + "output": 1000000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "MiniMax-01", + "open_weights": true, + "reasoning": true, + "release_date": "2025-01-15", + "temperature": true, + "tool_call": true + }, + "minimax/minimax-m1": { + "attachment": false, + "cost": { + "input": 0.4, + "output": 2.2 + }, + "id": "minimax/minimax-m1", + "last_updated": "2025-06-17", + "limit": { + "context": 1000000, + "output": 40000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "MiniMax M1", + "open_weights": true, + "reasoning": true, + "release_date": "2025-06-17", + "temperature": true, + "tool_call": true + }, + "minimax/minimax-m2:free": { + "attachment": false, + "cost": { + "cache_read": 0, + "cache_write": 0, + "input": 0, + "output": 0 + }, + "id": "minimax/minimax-m2:free", + "last_updated": "2025-10-23", + "limit": { + "context": 204800, + "output": 131100 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "MiniMax M2 (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-10-23", + "temperature": true, + "tool_call": true + }, + "mistralai/codestral-2508": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 0.9 + }, + "id": "mistralai/codestral-2508", + "knowledge": "2025-05", + "last_updated": "2025-08-01", + "limit": { + "context": 256000, + "output": 256000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Codestral 2508", + "open_weights": true, + "reasoning": false, + "release_date": "2025-08-01", + "temperature": true, + "tool_call": true + }, + "mistralai/devstral-medium-2507": { + "attachment": false, + "cost": { + "input": 0.4, + "output": 2 + }, + "id": "mistralai/devstral-medium-2507", + "knowledge": "2025-05", + "last_updated": "2025-07-10", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Devstral Medium", + "open_weights": true, + "reasoning": false, + "release_date": 
"2025-07-10", + "temperature": true, + "tool_call": true + }, + "mistralai/devstral-small-2505": { + "attachment": false, + "cost": { + "input": 0.06, + "output": 0.12 + }, + "id": "mistralai/devstral-small-2505", + "knowledge": "2025-05", + "last_updated": "2025-05-07", + "limit": { + "context": 128000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Devstral Small", + "open_weights": true, + "reasoning": false, + "release_date": "2025-05-07", + "temperature": true, + "tool_call": true + }, + "mistralai/devstral-small-2505:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistralai/devstral-small-2505:free", + "knowledge": "2025-05", + "last_updated": "2025-05-21", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Devstral Small 2505 (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-05-21", + "temperature": true, + "tool_call": true + }, + "mistralai/devstral-small-2507": { + "attachment": false, + "cost": { + "input": 0.1, + "output": 0.3 + }, + "id": "mistralai/devstral-small-2507", + "knowledge": "2025-05", + "last_updated": "2025-07-10", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Devstral Small 1.1", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-10", + "temperature": true, + "tool_call": true + }, + "mistralai/mistral-7b-instruct:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistralai/mistral-7b-instruct:free", + "knowledge": "2024-05", + "last_updated": "2024-05-27", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mistral 7B Instruct (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2024-05-27", + "temperature": true, + "tool_call": true + }, + "mistralai/mistral-medium-3": { + "attachment": true, + "cost": { + "input": 0.4, + "output": 2 + }, + "id": "mistralai/mistral-medium-3", + "knowledge": "2025-05", + "last_updated": "2025-05-07", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Medium 3", + "open_weights": false, + "reasoning": false, + "release_date": "2025-05-07", + "temperature": true, + "tool_call": true + }, + "mistralai/mistral-medium-3.1": { + "attachment": true, + "cost": { + "input": 0.4, + "output": 2 + }, + "id": "mistralai/mistral-medium-3.1", + "knowledge": "2025-05", + "last_updated": "2025-08-12", + "limit": { + "context": 262144, + "output": 262144 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Medium 3.1", + "open_weights": false, + "reasoning": false, + "release_date": "2025-08-12", + "temperature": true, + "tool_call": true + }, + "mistralai/mistral-nemo:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistralai/mistral-nemo:free", + "knowledge": "2024-07", + "last_updated": "2024-07-19", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Nemo (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2024-07-19", + 
"temperature": true, + "tool_call": true + }, + "mistralai/mistral-small-3.1-24b-instruct": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistralai/mistral-small-3.1-24b-instruct", + "knowledge": "2024-10", + "last_updated": "2025-03-17", + "limit": { + "context": 128000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Small 3.1 24B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-17", + "temperature": true, + "tool_call": true + }, + "mistralai/mistral-small-3.2-24b-instruct": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistralai/mistral-small-3.2-24b-instruct", + "knowledge": "2024-10", + "last_updated": "2025-06-20", + "limit": { + "context": 96000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Small 3.2 24B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-06-20", + "temperature": true, + "tool_call": true + }, + "mistralai/mistral-small-3.2-24b-instruct:free": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "mistralai/mistral-small-3.2-24b-instruct:free", + "knowledge": "2025-06", + "last_updated": "2025-06-20", + "limit": { + "context": 96000, + "output": 96000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Mistral Small 3.2 24B (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-06-20", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-dev-72b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "moonshotai/kimi-dev-72b:free", + "knowledge": "2025-06", + "last_updated": "2025-06-16", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi Dev 72b (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-06-16", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2": { + "attachment": false, + "cost": { + "input": 0.55, + "output": 2.2 + }, + "id": "moonshotai/kimi-k2", + "knowledge": "2024-10", + "last_updated": "2025-07-11", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-11", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2-0905": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.5 + }, + "id": "moonshotai/kimi-k2-0905", + "knowledge": "2024-10", + "last_updated": "2025-09-05", + "limit": { + "context": 262144, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct 0905", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-05", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2-0905:exacto": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.5 + }, + "id": "moonshotai/kimi-k2-0905:exacto", + "knowledge": "2024-10", + "last_updated": "2025-09-05", + "limit": { + "context": 262144, + "output": 16384 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct 0905 (exacto)", + "open_weights": true, + "reasoning": 
false, + "release_date": "2025-09-05", + "temperature": true, + "tool_call": true + }, + "moonshotai/kimi-k2:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "moonshotai/kimi-k2:free", + "knowledge": "2025-04", + "last_updated": "2025-07-11", + "limit": { + "context": 32800, + "output": 32800 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-11", + "temperature": true, + "tool_call": true + }, + "nousresearch/deephermes-3-llama-3-8b-preview": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "nousresearch/deephermes-3-llama-3-8b-preview", + "knowledge": "2024-04", + "last_updated": "2025-02-28", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepHermes 3 Llama 3 8B Preview", + "open_weights": true, + "reasoning": true, + "release_date": "2025-02-28", + "temperature": true, + "tool_call": true + }, + "nousresearch/hermes-4-405b": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "nousresearch/hermes-4-405b", + "knowledge": "2023-12", + "last_updated": "2025-08-25", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Hermes 4 405B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-25", + "temperature": true, + "tool_call": true + }, + "nousresearch/hermes-4-70b": { + "attachment": false, + "cost": { + "input": 0.13, + "output": 0.4 + }, + "id": "nousresearch/hermes-4-70b", + "knowledge": "2023-12", + "last_updated": "2025-08-25", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Hermes 4 70B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-25", + "temperature": true, + "tool_call": true + }, + "openai/gpt-4.1": { + "attachment": true, + "cost": { + "cache_read": 0.5, + "input": 2, + "output": 8 + }, + "id": "openai/gpt-4.1", + "knowledge": "2024-04", + "last_updated": "2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4.1", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "openai/gpt-4.1-mini": { + "attachment": true, + "cost": { + "cache_read": 0.1, + "input": 0.4, + "output": 1.6 + }, + "id": "openai/gpt-4.1-mini", + "knowledge": "2024-04", + "last_updated": "2025-04-14", + "limit": { + "context": 1047576, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4.1 Mini", + "open_weights": false, + "reasoning": false, + "release_date": "2025-04-14", + "temperature": true, + "tool_call": true + }, + "openai/gpt-4o-mini": { + "attachment": true, + "cost": { + "cache_read": 0.08, + "input": 0.15, + "output": 0.6 + }, + "id": "openai/gpt-4o-mini", + "knowledge": "2024-10", + "last_updated": "2024-07-18", + "limit": { + "context": 128000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-4o-mini", + "open_weights": false, + "reasoning": false, + "release_date": "2024-07-18", + "temperature": true, + 
"tool_call": true + }, + "openai/gpt-5": { + "attachment": true, + "cost": { + "input": 1.25, + "output": 10 + }, + "id": "openai/gpt-5", + "knowledge": "2024-10-01", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": true, + "tool_call": true + }, + "openai/gpt-5-chat": { + "attachment": true, + "cost": { + "input": 1.25, + "output": 10 + }, + "id": "openai/gpt-5-chat", + "knowledge": "2024-09-30", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Chat (latest)", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": true, + "tool_call": false + }, + "openai/gpt-5-codex": { + "attachment": true, + "cost": { + "cache_read": 0.125, + "input": 1.25, + "output": 10 + }, + "id": "openai/gpt-5-codex", + "knowledge": "2024-10-01", + "last_updated": "2025-09-15", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Codex", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-15", + "temperature": true, + "tool_call": true + }, + "openai/gpt-5-image": { + "attachment": true, + "cost": { + "cache_read": 1.25, + "input": 5, + "output": 10 + }, + "id": "openai/gpt-5-image", + "knowledge": "2024-10-01", + "last_updated": "2025-10-14", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image", + "pdf" + ], + "output": [ + "text", + "image" + ] + }, + "name": "GPT-5 Image", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-14", + "temperature": true, + "tool_call": true + }, + "openai/gpt-5-mini": { + "attachment": true, + "cost": { + "input": 0.25, + "output": 2 + }, + "id": "openai/gpt-5-mini", + "knowledge": "2024-10-01", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": true, + "tool_call": true + }, + "openai/gpt-5-nano": { + "attachment": true, + "cost": { + "input": 0.05, + "output": 0.4 + }, + "id": "openai/gpt-5-nano", + "knowledge": "2024-10-01", + "last_updated": "2025-08-07", + "limit": { + "context": 400000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Nano", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-07", + "temperature": true, + "tool_call": true + }, + "openai/gpt-5-pro": { + "attachment": true, + "cost": { + "input": 15, + "output": 120 + }, + "id": "openai/gpt-5-pro", + "knowledge": "2024-09-30", + "last_updated": "2025-10-06", + "limit": { + "context": 400000, + "output": 272000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "GPT-5 Pro", + "open_weights": false, + "reasoning": true, + "release_date": "2025-10-06", + "temperature": false, + "tool_call": true + }, + "openai/gpt-oss-120b": { + "attachment": false, + "cost": { + "input": 0.072, + "output": 0.28 
+ }, + "id": "openai/gpt-oss-120b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 120B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "openai/gpt-oss-120b:exacto": { + "attachment": false, + "cost": { + "input": 0.05, + "output": 0.24 + }, + "id": "openai/gpt-oss-120b:exacto", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 120B (exacto)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "openai/gpt-oss-20b": { + "attachment": false, + "cost": { + "input": 0.05, + "output": 0.2 + }, + "id": "openai/gpt-oss-20b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 20B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + }, + "openai/o4-mini": { + "attachment": true, + "cost": { + "cache_read": 0.28, + "input": 1.1, + "output": 4.4 + }, + "id": "openai/o4-mini", + "knowledge": "2024-06", + "last_updated": "2025-04-16", + "limit": { + "context": 200000, + "output": 100000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "o4 Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-04-16", + "temperature": true, + "tool_call": true + }, + "openrouter/cypher-alpha:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openrouter/cypher-alpha:free", + "knowledge": "2025-07", + "last_updated": "2025-07-01", + "limit": { + "context": 1000000, + "output": 1000000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Cypher Alpha (free)", + "open_weights": false, + "reasoning": false, + "release_date": "2025-07-01", + "temperature": true, + "tool_call": true + }, + "openrouter/horizon-alpha": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openrouter/horizon-alpha", + "knowledge": "2025-07", + "last_updated": "2025-07-30", + "limit": { + "context": 256000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Horizon Alpha", + "open_weights": false, + "reasoning": false, + "release_date": "2025-07-30", + "temperature": false, + "tool_call": true + }, + "openrouter/horizon-beta": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openrouter/horizon-beta", + "knowledge": "2025-07", + "last_updated": "2025-08-01", + "limit": { + "context": 256000, + "output": 128000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Horizon Beta", + "open_weights": false, + "reasoning": false, + "release_date": "2025-08-01", + "temperature": false, + "tool_call": true + }, + "openrouter/sonoma-dusk-alpha": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openrouter/sonoma-dusk-alpha", + "last_updated": "2024-09-05", + "limit": { + "context": 2000000, + "output": 2000000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + 
"text" + ] + }, + "name": "Sonoma Dusk Alpha", + "open_weights": false, + "reasoning": false, + "release_date": "2024-09-05", + "temperature": false, + "tool_call": true + }, + "openrouter/sonoma-sky-alpha": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "openrouter/sonoma-sky-alpha", + "last_updated": "2024-09-05", + "limit": { + "context": 2000000, + "output": 2000000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Sonoma Sky Alpha", + "open_weights": false, + "reasoning": false, + "release_date": "2024-09-05", + "temperature": false, + "tool_call": true + }, + "qwen/qwen-2.5-coder-32b-instruct": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen-2.5-coder-32b-instruct", + "knowledge": "2024-10", + "last_updated": "2024-11-11", + "limit": { + "context": 32768, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen2.5 Coder 32B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2024-11-11", + "temperature": true, + "tool_call": false + }, + "qwen/qwen2.5-vl-32b-instruct:free": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen2.5-vl-32b-instruct:free", + "knowledge": "2025-03", + "last_updated": "2025-03-24", + "limit": { + "context": 8192, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image", + "video" + ], + "output": [ + "text" + ] + }, + "name": "Qwen2.5 VL 32B Instruct (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-03-24", + "temperature": true, + "tool_call": true + }, + "qwen/qwen2.5-vl-72b-instruct": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen2.5-vl-72b-instruct", + "knowledge": "2024-10", + "last_updated": "2025-02-01", + "limit": { + "context": 32768, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Qwen2.5 VL 72B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-02-01", + "temperature": true, + "tool_call": false + }, + "qwen/qwen2.5-vl-72b-instruct:free": { + "attachment": true, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen2.5-vl-72b-instruct:free", + "knowledge": "2025-02", + "last_updated": "2025-02-01", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Qwen2.5 VL 72B Instruct (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-02-01", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-14b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-14b:free", + "knowledge": "2025-04", + "last_updated": "2025-04-28", + "limit": { + "context": 40960, + "output": 40960 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 14B (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-28", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-235b-a22b-07-25": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.85 + }, + "id": "qwen/qwen3-235b-a22b-07-25", + "knowledge": "2025-04", + "last_updated": "2025-07-21", + "limit": { + "context": 262144, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": 
"Qwen3 235B A22B Instruct 2507", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-28", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-235b-a22b-07-25:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-235b-a22b-07-25:free", + "knowledge": "2025-04", + "last_updated": "2025-07-21", + "limit": { + "context": 262144, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 235B A22B Instruct 2507 (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-04-28", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-235b-a22b-thinking-2507": { + "attachment": false, + "cost": { + "input": 0.078, + "output": 0.312 + }, + "id": "qwen/qwen3-235b-a22b-thinking-2507", + "knowledge": "2025-04", + "last_updated": "2025-07-25", + "limit": { + "context": 262144, + "output": 81920 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 235B A22B Thinking 2507", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-25", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-235b-a22b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-235b-a22b:free", + "knowledge": "2025-04", + "last_updated": "2025-04-28", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 235B A22B (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-28", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-30b-a3b-instruct-2507": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.8 + }, + "id": "qwen/qwen3-30b-a3b-instruct-2507", + "knowledge": "2025-04", + "last_updated": "2025-07-29", + "limit": { + "context": 262000, + "output": 262000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 30B A3B Instruct 2507", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-29", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-30b-a3b-thinking-2507": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 0.8 + }, + "id": "qwen/qwen3-30b-a3b-thinking-2507", + "knowledge": "2025-04", + "last_updated": "2025-07-29", + "limit": { + "context": 262000, + "output": 262000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 30B A3B Thinking 2507", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-29", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-30b-a3b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-30b-a3b:free", + "knowledge": "2025-04", + "last_updated": "2025-04-28", + "limit": { + "context": 40960, + "output": 40960 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 30B A3B (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-28", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-32b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-32b:free", + "knowledge": "2025-04", + "last_updated": "2025-04-28", + "limit": { + "context": 40960, + "output": 40960 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 32B 
(free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-28", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-8b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-8b:free", + "knowledge": "2025-04", + "last_updated": "2025-04-28", + "limit": { + "context": 40960, + "output": 40960 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 8B (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-28", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-coder": { + "attachment": false, + "cost": { + "input": 0.3, + "output": 1.2 + }, + "id": "qwen/qwen3-coder", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-coder:exacto": { + "attachment": false, + "cost": { + "input": 0.38, + "output": 1.53 + }, + "id": "qwen/qwen3-coder:exacto", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder (exacto)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-coder:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwen3-coder:free", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct (free)", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-max": { + "attachment": false, + "cost": { + "input": 1.2, + "output": 6 + }, + "id": "qwen/qwen3-max", + "last_updated": "2025-09-05", + "limit": { + "context": 262144, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Max", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-05", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-next-80b-a3b-instruct": { + "attachment": false, + "cost": { + "input": 0.14, + "output": 1.4 + }, + "id": "qwen/qwen3-next-80b-a3b-instruct", + "knowledge": "2025-04", + "last_updated": "2025-09-11", + "limit": { + "context": 262144, + "output": 262144 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Next 80B A3B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-09-11", + "temperature": true, + "tool_call": true + }, + "qwen/qwen3-next-80b-a3b-thinking": { + "attachment": false, + "cost": { + "input": 0.14, + "output": 1.4 + }, + "id": "qwen/qwen3-next-80b-a3b-thinking", + "knowledge": "2025-04", + "last_updated": "2025-09-11", + "limit": { + "context": 262144, + "output": 262144 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Next 80B A3B Thinking", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-11", + "temperature": true, + "tool_call": true + }, + "qwen/qwq-32b:free": { + 
"attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "qwen/qwq-32b:free", + "knowledge": "2025-03", + "last_updated": "2025-03-05", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "QwQ 32B (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-03-05", + "temperature": true, + "tool_call": true + }, + "rekaai/reka-flash-3": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "rekaai/reka-flash-3", + "knowledge": "2024-10", + "last_updated": "2025-03-12", + "limit": { + "context": 32768, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Reka Flash 3", + "open_weights": true, + "reasoning": true, + "release_date": "2025-03-12", + "temperature": true, + "tool_call": true + }, + "sarvamai/sarvam-m:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "sarvamai/sarvam-m:free", + "knowledge": "2025-05", + "last_updated": "2025-05-25", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Sarvam-M (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-05-25", + "temperature": true, + "tool_call": true + }, + "thudm/glm-z1-32b:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "thudm/glm-z1-32b:free", + "knowledge": "2025-04", + "last_updated": "2025-04-17", + "limit": { + "context": 32768, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM Z1 32B (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-04-17", + "temperature": true, + "tool_call": true + }, + "tngtech/deepseek-r1t2-chimera:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "tngtech/deepseek-r1t2-chimera:free", + "knowledge": "2025-07", + "last_updated": "2025-07-08", + "limit": { + "context": 163840, + "output": 163840 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek R1T2 Chimera (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-08", + "temperature": true, + "tool_call": false + }, + "x-ai/grok-3": { + "attachment": false, + "cost": { + "cache_read": 0.75, + "cache_write": 15, + "input": 3, + "output": 15 + }, + "id": "x-ai/grok-3", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "x-ai/grok-3-beta": { + "attachment": false, + "cost": { + "cache_read": 0.75, + "cache_write": 15, + "input": 3, + "output": 15 + }, + "id": "x-ai/grok-3-beta", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Beta", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "x-ai/grok-3-mini": { + "attachment": false, + "cost": { + "cache_read": 0.075, + "cache_write": 0.5, + "input": 0.3, + "output": 0.5 + }, + "id": "x-ai/grok-3-mini", + 
"knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "x-ai/grok-3-mini-beta": { + "attachment": false, + "cost": { + "cache_read": 0.075, + "cache_write": 0.5, + "input": 0.3, + "output": 0.5 + }, + "id": "x-ai/grok-3-mini-beta", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Mini Beta", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "x-ai/grok-4": { + "attachment": false, + "cost": { + "cache_read": 0.75, + "cache_write": 15, + "input": 3, + "output": 15 + }, + "id": "x-ai/grok-4", + "knowledge": "2025-07", + "last_updated": "2025-07-09", + "limit": { + "context": 256000, + "output": 64000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-07-09", + "temperature": true, + "tool_call": true + }, + "x-ai/grok-4-fast": { + "attachment": false, + "cost": { + "cache_read": 0.05, + "cache_write": 0.05, + "input": 0.2, + "output": 0.5 + }, + "id": "x-ai/grok-4-fast", + "knowledge": "2024-11", + "last_updated": "2025-08-19", + "limit": { + "context": 2000000, + "output": 30000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Grok 4 Fast", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-19", + "temperature": true, + "tool_call": true + }, + "x-ai/grok-code-fast-1": { + "attachment": false, + "cost": { + "cache_read": 0.02, + "input": 0.2, + "output": 1.5 + }, + "id": "x-ai/grok-code-fast-1", + "knowledge": "2025-08", + "last_updated": "2025-08-26", + "limit": { + "context": 256000, + "output": 10000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok Code Fast 1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-26", + "temperature": true, + "tool_call": true + }, + "z-ai/glm-4.5": { + "attachment": false, + "cost": { + "input": 0.6, + "output": 2.2 + }, + "id": "z-ai/glm-4.5", + "knowledge": "2025-04", + "last_updated": "2025-07-28", + "limit": { + "context": 128000, + "output": 96000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-28", + "temperature": true, + "tool_call": true + }, + "z-ai/glm-4.5-air": { + "attachment": false, + "cost": { + "input": 0.2, + "output": 1.1 + }, + "id": "z-ai/glm-4.5-air", + "knowledge": "2025-04", + "last_updated": "2025-07-28", + "limit": { + "context": 128000, + "output": 96000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5 Air", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-28", + "temperature": true, + "tool_call": true + }, + "z-ai/glm-4.5-air:free": { + "attachment": false, + "cost": { + "input": 0, + "output": 0 + }, + "id": "z-ai/glm-4.5-air:free", + "knowledge": "2025-04", + "last_updated": "2025-07-28", + "limit": { + "context": 128000, + "output": 96000 + }, + 
"modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5 Air (free)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-07-28", + "temperature": true, + "tool_call": false + }, + "z-ai/glm-4.5v": { + "attachment": true, + "cost": { + "input": 0.6, + "output": 1.8 + }, + "id": "z-ai/glm-4.5v", + "knowledge": "2025-04", + "last_updated": "2025-08-11", + "limit": { + "context": 64000, + "output": 16384 + }, + "modalities": { + "input": [ + "text", + "image", + "video" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.5V", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-11", + "temperature": true, + "tool_call": true + }, + "z-ai/glm-4.6": { + "attachment": false, + "cost": { + "cache_read": 0.11, + "input": 0.6, + "output": 2.2 + }, + "id": "z-ai/glm-4.6", + "knowledge": "2025-09", + "last_updated": "2025-09-30", + "limit": { + "context": 200000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.6", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-30", + "temperature": true, + "tool_call": true + }, + "z-ai/glm-4.6:exacto": { + "attachment": false, + "cost": { + "cache_read": 0.11, + "input": 0.6, + "output": 1.9 + }, + "id": "z-ai/glm-4.6:exacto", + "knowledge": "2025-09", + "last_updated": "2025-09-30", + "limit": { + "context": 200000, + "output": 128000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GLM 4.6 (exacto)", + "open_weights": true, + "reasoning": true, + "release_date": "2025-09-30", + "temperature": true, + "tool_call": true + } + }, + "name": "OpenRouter", + "npm": "@ai-sdk/openai-compatible" + }, + "perplexity": { + "doc": "https://docs.perplexity.ai", + "env": [ + "PERPLEXITY_API_KEY" + ], + "id": "perplexity", + "models": { + "sonar": { + "attachment": false, + "cost": { + "input": 1, + "output": 1 + }, + "id": "sonar", + "knowledge": "2025-09-01", + "last_updated": "2025-09-01", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Sonar", + "open_weights": false, + "reasoning": false, + "release_date": "2024-01-01", + "temperature": true, + "tool_call": false + }, + "sonar-pro": { + "attachment": true, + "cost": { + "input": 3, + "output": 15 + }, + "id": "sonar-pro", + "knowledge": "2025-09-01", + "last_updated": "2025-09-01", + "limit": { + "context": 200000, + "output": 8192 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Sonar Pro", + "open_weights": false, + "reasoning": false, + "release_date": "2024-01-01", + "temperature": true, + "tool_call": false + }, + "sonar-reasoning": { + "attachment": false, + "cost": { + "input": 1, + "output": 5 + }, + "id": "sonar-reasoning", + "knowledge": "2025-09-01", + "last_updated": "2025-09-01", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Sonar Reasoning", + "open_weights": false, + "reasoning": true, + "release_date": "2024-01-01", + "temperature": true, + "tool_call": false + }, + "sonar-reasoning-pro": { + "attachment": true, + "cost": { + "input": 2, + "output": 8 + }, + "id": "sonar-reasoning-pro", + "knowledge": "2025-09-01", + "last_updated": "2025-09-01", + "limit": { + "context": 128000, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + 
], + "output": [ + "text" + ] + }, + "name": "Sonar Reasoning Pro", + "open_weights": false, + "reasoning": true, + "release_date": "2024-01-01", + "temperature": true, + "tool_call": false + } + }, + "name": "Perplexity", + "npm": "@perplexity-ai/perplexity_ai" + }, + "togetherai": { + "doc": "https://docs.together.ai/docs/serverless-models", + "env": [ + "TOGETHER_API_KEY" + ], + "id": "togetherai", + "models": { + "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8": { + "attachment": false, + "cost": { + "input": 2, + "output": 2 + }, + "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8", + "knowledge": "2025-04", + "last_updated": "2025-07-23", + "limit": { + "context": 262144, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Qwen3 Coder 480B A35B Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-23", + "temperature": true, + "tool_call": true + }, + "deepseek-ai/DeepSeek-R1": { + "attachment": false, + "cost": { + "input": 3, + "output": 7 + }, + "id": "deepseek-ai/DeepSeek-R1", + "knowledge": "2024-07", + "last_updated": "2025-03-24", + "limit": { + "context": 163839, + "output": 12288 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek R1", + "open_weights": true, + "reasoning": true, + "release_date": "2024-12-26", + "temperature": true, + "tool_call": false + }, + "deepseek-ai/DeepSeek-V3": { + "attachment": false, + "cost": { + "input": 1.25, + "output": 1.25 + }, + "id": "deepseek-ai/DeepSeek-V3", + "knowledge": "2024-07", + "last_updated": "2025-05-29", + "limit": { + "context": 131072, + "output": 12288 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "DeepSeek V3", + "open_weights": true, + "reasoning": true, + "release_date": "2025-01-20", + "temperature": true, + "tool_call": true + }, + "meta-llama/Llama-3.3-70B-Instruct-Turbo": { + "attachment": false, + "cost": { + "input": 0.88, + "output": 0.88 + }, + "id": "meta-llama/Llama-3.3-70B-Instruct-Turbo", + "knowledge": "2023-12", + "last_updated": "2024-12-06", + "limit": { + "context": 131072, + "output": 66536 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Llama 3.3 70B", + "open_weights": true, + "reasoning": false, + "release_date": "2024-12-06", + "temperature": true, + "tool_call": true + }, + "moonshotai/Kimi-K2-Instruct": { + "attachment": false, + "cost": { + "input": 1, + "output": 3 + }, + "id": "moonshotai/Kimi-K2-Instruct", + "knowledge": "2024-10", + "last_updated": "2025-07-14", + "limit": { + "context": 131072, + "output": 32768 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Kimi K2 Instruct", + "open_weights": true, + "reasoning": false, + "release_date": "2025-07-14", + "temperature": true, + "tool_call": true + }, + "openai/gpt-oss-120b": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.6 + }, + "id": "openai/gpt-oss-120b", + "last_updated": "2025-08-05", + "limit": { + "context": 131072, + "output": 131072 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "GPT OSS 120B", + "open_weights": true, + "reasoning": true, + "release_date": "2025-08-05", + "temperature": true, + "tool_call": true + } + }, + "name": "Together AI", + "npm": "@ai-sdk/togetherai" + }, + "upstage": { + "api": "https://api.upstage.ai", + "doc": "https://developers.upstage.ai/docs/apis/chat", + "env": [ + 
"UPSTAGE_API_KEY" + ], + "id": "upstage", + "models": { + "solar-mini": { + "attachment": false, + "cost": { + "input": 0.15, + "output": 0.15 + }, + "id": "solar-mini", + "knowledge": "2024-09", + "last_updated": "2025-04-22", + "limit": { + "context": 32768, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "solar-mini", + "open_weights": false, + "reasoning": false, + "release_date": "2024-06-12", + "temperature": true, + "tool_call": true + }, + "solar-pro2": { + "attachment": false, + "cost": { + "input": 0.25, + "output": 0.25 + }, + "id": "solar-pro2", + "knowledge": "2025-03", + "last_updated": "2025-05-20", + "limit": { + "context": 65536, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "solar-pro2", + "open_weights": false, + "reasoning": true, + "release_date": "2025-05-20", + "temperature": true, + "tool_call": true + } + }, + "name": "Upstage", + "npm": "@ai-sdk/openai-compatible" + }, + "xai": { + "doc": "https://docs.x.ai/docs/models", + "env": [ + "XAI_API_KEY" + ], + "id": "xai", + "models": { + "grok-2": { + "attachment": false, + "cost": { + "cache_read": 2, + "input": 2, + "output": 10 + }, + "id": "grok-2", + "knowledge": "2024-08", + "last_updated": "2024-08-20", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 2", + "open_weights": false, + "reasoning": false, + "release_date": "2024-08-20", + "temperature": true, + "tool_call": true + }, + "grok-2-1212": { + "attachment": false, + "cost": { + "cache_read": 2, + "input": 2, + "output": 10 + }, + "id": "grok-2-1212", + "knowledge": "2024-08", + "last_updated": "2024-12-12", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 2 (1212)", + "open_weights": false, + "reasoning": false, + "release_date": "2024-12-12", + "temperature": true, + "tool_call": true + }, + "grok-2-latest": { + "attachment": false, + "cost": { + "cache_read": 2, + "input": 2, + "output": 10 + }, + "id": "grok-2-latest", + "knowledge": "2024-08", + "last_updated": "2024-12-12", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 2 Latest", + "open_weights": false, + "reasoning": false, + "release_date": "2024-08-20", + "temperature": true, + "tool_call": true + }, + "grok-2-vision": { + "attachment": true, + "cost": { + "cache_read": 2, + "input": 2, + "output": 10 + }, + "id": "grok-2-vision", + "knowledge": "2024-08", + "last_updated": "2024-08-20", + "limit": { + "context": 8192, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Grok 2 Vision", + "open_weights": false, + "reasoning": false, + "release_date": "2024-08-20", + "temperature": true, + "tool_call": true + }, + "grok-2-vision-1212": { + "attachment": true, + "cost": { + "cache_read": 2, + "input": 2, + "output": 10 + }, + "id": "grok-2-vision-1212", + "knowledge": "2024-08", + "last_updated": "2024-12-12", + "limit": { + "context": 8192, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Grok 2 Vision (1212)", + "open_weights": false, + "reasoning": false, + "release_date": "2024-08-20", + "temperature": true, + "tool_call": true + }, + 
"grok-2-vision-latest": { + "attachment": true, + "cost": { + "cache_read": 2, + "input": 2, + "output": 10 + }, + "id": "grok-2-vision-latest", + "knowledge": "2024-08", + "last_updated": "2024-12-12", + "limit": { + "context": 8192, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Grok 2 Vision Latest", + "open_weights": false, + "reasoning": false, + "release_date": "2024-08-20", + "temperature": true, + "tool_call": true + }, + "grok-3": { + "attachment": false, + "cost": { + "cache_read": 0.75, + "input": 3, + "output": 15 + }, + "id": "grok-3", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-3-fast": { + "attachment": false, + "cost": { + "cache_read": 1.25, + "input": 5, + "output": 25 + }, + "id": "grok-3-fast", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Fast", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-3-fast-latest": { + "attachment": false, + "cost": { + "cache_read": 1.25, + "input": 5, + "output": 25 + }, + "id": "grok-3-fast-latest", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Fast Latest", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-3-latest": { + "attachment": false, + "cost": { + "cache_read": 0.75, + "input": 3, + "output": 15 + }, + "id": "grok-3-latest", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Latest", + "open_weights": false, + "reasoning": false, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-3-mini": { + "attachment": false, + "cost": { + "cache_read": 0.075, + "input": 0.3, + "output": 0.5, + "reasoning": 0.5 + }, + "id": "grok-3-mini", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Mini", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-3-mini-fast": { + "attachment": false, + "cost": { + "cache_read": 0.15, + "input": 0.6, + "output": 4, + "reasoning": 4 + }, + "id": "grok-3-mini-fast", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Mini Fast", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-3-mini-fast-latest": { + "attachment": false, + "cost": { + "cache_read": 0.15, + "input": 0.6, + "output": 4, + 
"reasoning": 4 + }, + "id": "grok-3-mini-fast-latest", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Mini Fast Latest", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-3-mini-latest": { + "attachment": false, + "cost": { + "cache_read": 0.075, + "input": 0.3, + "output": 0.5, + "reasoning": 0.5 + }, + "id": "grok-3-mini-latest", + "knowledge": "2024-11", + "last_updated": "2025-02-17", + "limit": { + "context": 131072, + "output": 8192 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 3 Mini Latest", + "open_weights": false, + "reasoning": true, + "release_date": "2025-02-17", + "temperature": true, + "tool_call": true + }, + "grok-4": { + "attachment": false, + "cost": { + "cache_read": 0.75, + "input": 3, + "output": 15, + "reasoning": 15 + }, + "id": "grok-4", + "knowledge": "2025-07", + "last_updated": "2025-07-09", + "limit": { + "context": 256000, + "output": 64000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok 4", + "open_weights": false, + "reasoning": true, + "release_date": "2025-07-09", + "temperature": true, + "tool_call": true + }, + "grok-4-fast": { + "attachment": true, + "cost": { + "cache_read": 0.05, + "input": 0.2, + "output": 0.5 + }, + "id": "grok-4-fast", + "knowledge": "2025-07", + "last_updated": "2025-09-19", + "limit": { + "context": 2000000, + "output": 30000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Grok 4 Fast", + "open_weights": false, + "reasoning": true, + "release_date": "2025-09-19", + "temperature": true, + "tool_call": true + }, + "grok-4-fast-non-reasoning": { + "attachment": true, + "cost": { + "cache_read": 0.05, + "input": 0.2, + "output": 0.5 + }, + "id": "grok-4-fast-non-reasoning", + "knowledge": "2025-07", + "last_updated": "2025-09-19", + "limit": { + "context": 2000000, + "output": 30000 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Grok 4 Fast (Non-Reasoning)", + "open_weights": false, + "reasoning": false, + "release_date": "2025-09-19", + "temperature": true, + "tool_call": true + }, + "grok-beta": { + "attachment": false, + "cost": { + "cache_read": 5, + "input": 5, + "output": 15 + }, + "id": "grok-beta", + "knowledge": "2024-08", + "last_updated": "2024-11-01", + "limit": { + "context": 131072, + "output": 4096 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok Beta", + "open_weights": false, + "reasoning": false, + "release_date": "2024-11-01", + "temperature": true, + "tool_call": true + }, + "grok-code-fast-1": { + "attachment": false, + "cost": { + "cache_read": 0.02, + "input": 0.2, + "output": 1.5 + }, + "id": "grok-code-fast-1", + "knowledge": "2023-10", + "last_updated": "2025-08-28", + "limit": { + "context": 256000, + "output": 10000 + }, + "modalities": { + "input": [ + "text" + ], + "output": [ + "text" + ] + }, + "name": "Grok Code Fast 1", + "open_weights": false, + "reasoning": true, + "release_date": "2025-08-28", + "temperature": true, + "tool_call": true + }, + "grok-vision-beta": { + "attachment": true, + "cost": { + "cache_read": 5, + "input": 5, + "output": 15 + }, + "id": "grok-vision-beta", + "knowledge": 
"2024-08", + "last_updated": "2024-11-01", + "limit": { + "context": 8192, + "output": 4096 + }, + "modalities": { + "input": [ + "text", + "image" + ], + "output": [ + "text" + ] + }, + "name": "Grok Vision Beta", + "open_weights": false, + "reasoning": false, + "release_date": "2024-11-01", + "temperature": true, + "tool_call": true + } + }, + "name": "xAI", + "npm": "@ai-sdk/xai" + } +} \ No newline at end of file diff --git a/libs/model-profiles/langchain_model_profiles/model_profile.py b/libs/model-profiles/langchain_model_profiles/model_profile.py new file mode 100644 index 00000000000..e6dd04dacc6 --- /dev/null +++ b/libs/model-profiles/langchain_model_profiles/model_profile.py @@ -0,0 +1,171 @@ +"""Model profiles package.""" + +import re + +from typing_extensions import TypedDict + +from langchain_model_profiles._data_loader import _DataLoader + + +class ModelProfile(TypedDict, total=False): + """Model profile.""" + + # --- Input constraints --- + + max_input_tokens: int + """Maximum context window (tokens)""" + + image_inputs: bool + """Whether image inputs are supported.""" + # TODO: add more detail about formats? + + image_url_inputs: bool + """Whether [image URL inputs](https://docs.langchain.com/oss/python/langchain/models#multimodal) + are supported.""" + + pdf_inputs: bool + """Whether [PDF inputs](https://docs.langchain.com/oss/python/langchain/models#multimodal) + are supported.""" + # TODO: add more detail about formats? e.g. bytes or base64 + + audio_inputs: bool + """Whether [audio inputs](https://docs.langchain.com/oss/python/langchain/models#multimodal) + are supported.""" + # TODO: add more detail about formats? e.g. bytes or base64 + + video_inputs: bool + """Whether [video inputs](https://docs.langchain.com/oss/python/langchain/models#multimodal) + are supported.""" + # TODO: add more detail about formats? e.g. 
bytes or base64 + + image_tool_message: bool + """TODO: description.""" + + pdf_tool_message: bool + """TODO: description.""" + + # --- Output constraints --- + + max_output_tokens: int + """Maximum output tokens""" + + reasoning_output: bool + """Whether the model supports [reasoning / chain-of-thought](https://docs.langchain.com/oss/python/langchain/models#reasoning)""" + + image_outputs: bool + """Whether [image outputs](https://docs.langchain.com/oss/python/langchain/models#multimodal) + are supported.""" + + audio_outputs: bool + """Whether [audio outputs](https://docs.langchain.com/oss/python/langchain/models#multimodal) + are supported.""" + + video_outputs: bool + """Whether [video outputs](https://docs.langchain.com/oss/python/langchain/models#multimodal) + are supported.""" + + # --- Tool calling --- + tool_calling: bool + """Whether the model supports [tool calling](https://docs.langchain.com/oss/python/langchain/models#tool-calling)""" + + tool_choice: bool + """Whether the model supports [tool choice](https://docs.langchain.com/oss/python/langchain/models#forcing-tool-calls)""" + + # --- Structured output --- + structured_output: bool + """Whether the model supports a native [structured output](https://docs.langchain.com/oss/python/langchain/models#structured-outputs) + feature""" + + +_LOADER = _DataLoader() + +_lc_type_to_provider_id = { + "openai-chat": "openai", + "azure-openai-chat": "azure", + "anthropic-chat": "anthropic", + "chat-google-generative-ai": "google", + "vertexai": "google-vertex", + "anthropic-chat-vertexai": "google-vertex-anthropic", + "amazon_bedrock_chat": "amazon-bedrock", + "amazon_bedrock_converse_chat": "amazon-bedrock", + "chat-ai21": "ai21", + "chat-deepseek": "deepseek", + "fireworks-chat": "fireworks-ai", + "groq-chat": "groq", + "huggingface-chat-wrapper": "huggingface", + "mistralai-chat": "mistral", + "chat-ollama": "ollama", + "perplexitychat": "perplexity", + "together-chat": "togetherai", + "upstage-chat": "upstage", + "xai-chat": "xai", +} + + +def _translate_provider_and_model_id(provider: str, model: str) -> tuple[str, str]: + """Translate LangChain provider and model to models.dev equivalents. + + Args: + provider: LangChain provider ID. + model: LangChain model ID. + + Returns: + A tuple containing the models.dev provider ID and model ID. + """ + provider_id = _lc_type_to_provider_id.get(provider, provider) + + if provider_id in ("google", "google-vertex"): + # convert models/gemini-2.0-flash-001 to gemini-2.0-flash + model_id = re.sub(r"-\d{3}$", "", model.replace("models/", "")) + elif provider_id == "amazon-bedrock": + # strip region prefixes like "us." + model_id = re.sub(r"^[A-Za-z]{2}\.", "", model) + else: + model_id = model + + return provider_id, model_id + + +def get_model_profile(provider: str, model: str) -> ModelProfile | None: + """Get the model capabilities for a given model. + + Args: + provider: Identifier for provider (e.g., `'openai'`, `'anthropic'`). + model: Identifier for model (e.g., `'gpt-5'`, + `'claude-sonnet-4-5-20250929'`). + + Returns: + The model capabilities or `None` if not found in the data. 
+ """ + if not provider or not model: + return None + + provider_id, model_id = _translate_provider_and_model_id(provider, model) + data = _LOADER.get_profile_data(provider_id, model_id) + if not data: + # If either (1) provider not found or (2) model not found under matched provider + return None + + # Map models.dev & augmentation fields -> ModelProfile fields + # See schema reference to see fields dropped: https://github.com/sst/models.dev?tab=readme-ov-file#schema-reference + profile = { + "max_input_tokens": data.get("limit", {}).get("context"), + "image_inputs": "image" in data.get("modalities", {}).get("input", []), + "image_url_inputs": data.get("image_url_inputs"), + "image_tool_message": data.get("image_tool_message"), + "audio_inputs": "audio" in data.get("modalities", {}).get("input", []), + "pdf_inputs": "pdf" in data.get("modalities", {}).get("input", []) + or data.get("pdf_inputs"), + "pdf_tool_message": data.get("pdf_tool_message"), + "video_inputs": "video" in data.get("modalities", {}).get("input", []), + "max_output_tokens": data.get("limit", {}).get("output"), + "reasoning_output": data.get("reasoning"), + "image_outputs": "image" in data.get("modalities", {}).get("output", []), + "audio_outputs": "audio" in data.get("modalities", {}).get("output", []), + "video_outputs": "video" in data.get("modalities", {}).get("output", []), + "tool_calling": data.get("tool_call"), + "tool_choice": data.get("tool_choice"), + "structured_output": data.get("structured_output"), + } + + return ModelProfile(**{k: v for k, v in profile.items() if v is not None}) # type: ignore[typeddict-item] diff --git a/libs/model-profiles/pyproject.toml b/libs/model-profiles/pyproject.toml new file mode 100644 index 00000000000..3d569ba88d3 --- /dev/null +++ b/libs/model-profiles/pyproject.toml @@ -0,0 +1,108 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "langchain-model-profiles" +description = "Centralized reference of LLM capabilities." 
+readme = "README.md" +license = { text = "MIT" } +authors = [] + +version = "0.0.3" +requires-python = ">=3.10.0,<4.0.0" +dependencies = [ + "tomli>=2.0.0,<3.0.0; python_version < '3.11'", + "typing-extensions>=4.7.0,<5.0.0", +] + +[project.urls] +Homepage = "https://docs.langchain.com/" +Documentation = "https://reference.langchain.com/python/langchain_model_profiles/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/model-profiles" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" + +[dependency-groups] +dev = [ + "httpx>=0.23.0,<1", # For refresh_data.py script +] + +test = [ + "pytest>=8.0.0,<9.0.0", + "pytest-cov>=4.0.0,<8.0.0", + "pytest-watcher>=0.2.6,<1.0.0", + "pytest-asyncio>=0.23.2,<2.0.0", + "pytest-socket>=0.6.0,<1.0.0", + "pytest-xdist<4.0.0,>=3.6.1", + "pytest-mock", + "syrupy>=4.0.2,<5.0.0", + "toml>=0.10.2,<1.0.0", + "langchain[openai]>=1.0.2,<2.0.0", + "langchain-core", +] + +test_integration = ["langchain-core"] + +lint = [ + "ruff>=0.12.2,<0.13.0", + "langchain", +] +typing = [ + "mypy>=1.18.1,<1.19.0", + "types-toml>=0.10.8.20240310,<1.0.0.0", +] + +[tool.uv.sources] +langchain-core = { path = "../core", editable = true } +langchain = { path = "../langchain_v1", editable = true } + +[tool.ruff.format] +docstring-code-format = true + +[tool.ruff.lint] +select = [ + "ALL" +] +ignore = [ + "COM812", # Messes with the formatter + "ISC001", # Messes with the formatter + "PERF203", # Rarely useful + "SLF001", # Private member access + "PLC0415", # Imports should be at the top. Not always desirable + "PLR0913", # Too many arguments in function definition + "PLC0414", # Inconsistent with how type checkers expect to be notified of intentional re-exports + "S101", # Tests need assertions + "PLR2004", # Magic numbers + "ARG001", + "D104", + "FIX002", + "TD002", + "TD003", +] +unfixable = ["B028"] # People should intentionally tune the stacklevel + +pyupgrade.keep-runtime-typing = true +flake8-annotations.allow-star-arg-any = true + +[tool.ruff.lint.pydocstyle] +convention = "google" +ignore-var-parameters = true # ignore missing documentation for *args and **kwargs parameters + +[tool.coverage.run] +omit = ["tests/*"] + +[tool.pytest.ini_options] +addopts = "--strict-markers --strict-config --durations=5 --snapshot-warn-unused -vv" +markers = [ + "requires: mark tests as requiring a specific library", + "scheduled: mark tests to run in scheduled testing", + "compile: mark placeholder test used to compile integration tests without running them", +] +asyncio_mode = "auto" +filterwarnings = [ + "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning", + "ignore::langchain_core._api.deprecation.LangChainDeprecationWarning:tests", + "ignore::langchain_core._api.deprecation.LangChainPendingDeprecationWarning:tests", +] diff --git a/libs/model-profiles/scripts/refresh_data.py b/libs/model-profiles/scripts/refresh_data.py new file mode 100755 index 00000000000..53af341c07c --- /dev/null +++ b/libs/model-profiles/scripts/refresh_data.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 +"""Refresh model profile data from models.dev. 
+ +Update the bundled model data by running: + python scripts/refresh_data.py +""" + +import json +from pathlib import Path + +import httpx + +PROVIDER_SUBSET = [ + # This is done to limit the data size + "amazon-bedrock", + "anthropic", + "azure", + "baseten", + "cerebras", + "cloudflare-workers-ai", + "deepinfra", + "deepseek", + "fireworks-ai", + "google", + "google-vertex", + "google-vertex-anthropic", + "groq", + "huggingface", + "lmstudio", + "mistral", + "nebius", + "nvidia", + "openai", + "openrouter", + "perplexity", + "togetherai", + "upstage", + "xai", +] + + +def main() -> None: + """Download and save the latest model data from models.dev.""" + api_url = "https://models.dev/api.json" + output_dir = Path(__file__).parent.parent / "langchain_model_profiles" / "data" + output_file = output_dir / "models.json" + + print(f"Downloading data from {api_url}...") # noqa: T201 + response = httpx.get(api_url, timeout=30) + response.raise_for_status() + + data = response.json() + + # Basic validation + if not isinstance(data, dict): + msg = "Expected API response to be a dictionary" + raise TypeError(msg) + + provider_count = len(data) + model_count = sum(len(provider.get("models", {})) for provider in data.values()) + + print(f"Downloaded {provider_count} providers with {model_count} models") # noqa: T201 + + # Subset providers + data = {k: v for k, v in data.items() if k in PROVIDER_SUBSET} + print(f"Filtered to {len(data)} providers based on subset") # noqa: T201 + + # Ensure directory exists + output_dir.mkdir(parents=True, exist_ok=True) + + # Write with pretty formatting for readability + print(f"Writing to {output_file}...") # noqa: T201 + with output_file.open("w") as f: + json.dump(data, f, indent=2, sort_keys=True) + + print(f"βœ“ Successfully refreshed model data ({output_file.stat().st_size:,} bytes)") # noqa: T201 + + +if __name__ == "__main__": + main() diff --git a/libs/langchain_v1/tests/unit_tests/storage/__init__.py b/libs/model-profiles/tests/__init__.py similarity index 100% rename from libs/langchain_v1/tests/unit_tests/storage/__init__.py rename to libs/model-profiles/tests/__init__.py diff --git a/libs/model-profiles/tests/integration_tests/__init__.py b/libs/model-profiles/tests/integration_tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/libs/model-profiles/tests/integration_tests/test_compile.py b/libs/model-profiles/tests/integration_tests/test_compile.py new file mode 100644 index 00000000000..1fa3ee28f44 --- /dev/null +++ b/libs/model-profiles/tests/integration_tests/test_compile.py @@ -0,0 +1,8 @@ +"""Test compilation of integration tests.""" + +import pytest + + +@pytest.mark.compile +def test_placeholder() -> None: + """Used for compiling integration tests without running any real tests.""" diff --git a/libs/model-profiles/tests/unit_tests/__init__.py b/libs/model-profiles/tests/unit_tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/libs/model-profiles/tests/unit_tests/test_chat_model.py b/libs/model-profiles/tests/unit_tests/test_chat_model.py new file mode 100644 index 00000000000..a32e52c0122 --- /dev/null +++ b/libs/model-profiles/tests/unit_tests/test_chat_model.py @@ -0,0 +1,18 @@ +"""End to end test for fetching model profiles from a chat model.""" + +from langchain.chat_models import init_chat_model + + +def test_chat_model() -> None: + """Test that chat model gets profile data correctly.""" + model = init_chat_model("openai:gpt-5", api_key="foo") + assert model.profile + assert 
model.profile["max_input_tokens"] == 400000 + assert model.profile["structured_output"] + assert model.profile["pdf_inputs"] + + +def test_chat_model_no_data() -> None: + """Test that chat model handles missing profile data.""" + model = init_chat_model("openai:gpt-fake", api_key="foo") + assert model.profile == {} diff --git a/libs/model-profiles/tests/unit_tests/test_data_loader.py b/libs/model-profiles/tests/unit_tests/test_data_loader.py new file mode 100644 index 00000000000..ebe5d003f46 --- /dev/null +++ b/libs/model-profiles/tests/unit_tests/test_data_loader.py @@ -0,0 +1,142 @@ +"""Tests for data loader with augmentation support.""" + +import json +from pathlib import Path + +import pytest + +from langchain_model_profiles._data_loader import _DataLoader + + +@pytest.fixture +def temp_data_dir(tmp_path: Path) -> Path: + """Create a temporary data directory structure.""" + data_dir = tmp_path / "data" + data_dir.mkdir() + + # Create base models.json + base_data = { + "test-provider": { + "id": "test-provider", + "models": { + "test-model": { + "id": "test-model", + "name": "Test Model", + "tool_call": True, + "limit": {"context": 8000, "output": 4000}, + "modalities": {"input": ["text"], "output": ["text"]}, + } + }, + } + } + + with (data_dir / "models.json").open("w") as f: + json.dump(base_data, f) + + # Create augmentations directories + aug_dir = data_dir / "augmentations" + (aug_dir / "providers").mkdir(parents=True) + (aug_dir / "models" / "test-provider").mkdir(parents=True) + + return data_dir + + +def test_load_base_data_only(temp_data_dir: Path) -> None: + """Test loading base data without augmentations.""" + loader = _DataLoader() + # Patch before any property access + loader._data_dir = temp_data_dir + result = loader.get_profile_data("test-provider", "test-model") + + assert result is not None + assert result["id"] == "test-model" + assert result["name"] == "Test Model" + assert result["tool_call"] is True + + +def test_provider_level_augmentation(temp_data_dir: Path) -> None: + """Test provider-level augmentations are applied.""" + # Add provider augmentation + provider_toml = temp_data_dir / "augmentations" / "providers" / "test-provider.toml" + provider_toml.write_text(""" +[profile] +image_url_inputs = true +pdf_inputs = true +""") + + loader = _DataLoader() + loader._data_dir = temp_data_dir + result = loader.get_profile_data("test-provider", "test-model") + + assert result is not None + assert result["image_url_inputs"] is True + assert result["pdf_inputs"] is True + # Base data should still be present + assert result["tool_call"] is True + + +def test_model_level_augmentation_overrides_provider(temp_data_dir: Path) -> None: + """Test model-level augmentations override provider augmentations.""" + # Add provider augmentation + provider_toml = temp_data_dir / "augmentations" / "providers" / "test-provider.toml" + provider_toml.write_text(""" +[profile] +image_url_inputs = true +pdf_inputs = false +""") + + # Add model augmentation that overrides + model_toml = ( + temp_data_dir / "augmentations" / "models" / "test-provider" / "test-model.toml" + ) + model_toml.write_text(""" +[profile] +pdf_inputs = true +reasoning_output = true +""") + + loader = _DataLoader() + loader._data_dir = temp_data_dir + result = loader.get_profile_data("test-provider", "test-model") + + assert result is not None + # From provider + assert result["image_url_inputs"] is True + # Overridden by model + assert result["pdf_inputs"] is True + # From model only + assert 
result["reasoning_output"] is True + # From base + assert result["tool_call"] is True + + +def test_missing_provider(temp_data_dir: Path) -> None: + """Test returns None for missing provider.""" + loader = _DataLoader() + loader._data_dir = temp_data_dir + result = loader.get_profile_data("nonexistent-provider", "test-model") + + assert result is None + + +def test_missing_model(temp_data_dir: Path) -> None: + """Test returns None for missing model.""" + loader = _DataLoader() + loader._data_dir = temp_data_dir + result = loader.get_profile_data("test-provider", "nonexistent-model") + + assert result is None + + +def test_merged_data_is_cached(temp_data_dir: Path) -> None: + """Test that merged data is cached after first access.""" + loader = _DataLoader() + loader._data_dir = temp_data_dir + # First access + result1 = loader.get_profile_data("test-provider", "test-model") + # Second access should use cached data + result2 = loader.get_profile_data("test-provider", "test-model") + + assert result1 == result2 + # Verify it's using the cached property by checking _merged_data was accessed + assert hasattr(loader, "_merged_data") diff --git a/libs/model-profiles/tests/unit_tests/test_model_profile.py b/libs/model-profiles/tests/unit_tests/test_model_profile.py new file mode 100644 index 00000000000..44c40c640a9 --- /dev/null +++ b/libs/model-profiles/tests/unit_tests/test_model_profile.py @@ -0,0 +1,31 @@ +"""Test provider and model ID mappings.""" + +import pytest + +from langchain_model_profiles.model_profile import get_model_profile + + +@pytest.mark.parametrize( + ("provider", "model_id"), + [ + ("openai-chat", "gpt-5"), + ("azure-openai-chat", "gpt-5"), + ("anthropic-chat", "claude-sonnet-4-5"), + ("vertexai", "models/gemini-2.0-flash-001"), + ("chat-google-generative-ai", "models/gemini-2.0-flash-001"), + ("amazon_bedrock_chat", "anthropic.claude-sonnet-4-20250514-v1:0"), + ("amazon_bedrock_converse_chat", "anthropic.claude-sonnet-4-20250514-v1:0"), + # ("chat-ai21", "jamba-mini"), # no data yet # noqa: ERA001 + ("chat-deepseek", "deepseek-reasoner"), + ("fireworks-chat", "accounts/fireworks/models/gpt-oss-20b"), + ("groq-chat", "llama-3.3-70b-versatile"), + ("huggingface-chat-wrapper", "Qwen/Qwen3-235B-A22B-Thinking-2507"), + ("mistralai-chat", "mistral-large-latest"), + # ("chat-ollama", "llama3.1"), # no data yet # noqa: ERA001 + ("perplexitychat", "sonar"), + ("xai-chat", "grok-4"), + ], +) +def test_id_translation(provider: str, model_id: str) -> None: + """Test translation from LangChain to model / provider IDs.""" + assert get_model_profile(provider, model_id) diff --git a/libs/model-profiles/uv.lock b/libs/model-profiles/uv.lock new file mode 100644 index 00000000000..de16e602a73 --- /dev/null +++ b/libs/model-profiles/uv.lock @@ -0,0 +1,1914 @@ +version = 1 +revision = 2 +requires-python = ">=3.10.0, <4.0.0" +resolution-markers = [ + "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", + "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", + "python_full_version < '3.11' and platform_python_implementation == 'PyPy'", + "python_full_version < '3.11' and platform_python_implementation != 'PyPy'", +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, + { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, + { url = "https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 107198, upload-time = "2025-10-14T04:40:30.644Z" }, + { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + 
+[[package]] +name = "coverage" +version = "7.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/95/c49df0aceb5507a80b9fe5172d3d39bf23f05be40c23c8d77d556df96cec/coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31", size = 215800, upload-time = "2025-10-15T15:12:19.824Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c6/7bb46ce01ed634fff1d7bb53a54049f539971862cc388b304ff3c51b4f66/coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075", size = 216198, upload-time = "2025-10-15T15:12:22.549Z" }, + { url = "https://files.pythonhosted.org/packages/94/b2/75d9d8fbf2900268aca5de29cd0a0fe671b0f69ef88be16767cc3c828b85/coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab", size = 242953, upload-time = "2025-10-15T15:12:24.139Z" }, + { url = "https://files.pythonhosted.org/packages/65/ac/acaa984c18f440170525a8743eb4b6c960ace2dbad80dc22056a437fc3c6/coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0", size = 244766, upload-time = "2025-10-15T15:12:25.974Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0d/938d0bff76dfa4a6b228c3fc4b3e1c0e2ad4aa6200c141fcda2bd1170227/coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785", size = 246625, upload-time = "2025-10-15T15:12:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/38/54/8f5f5e84bfa268df98f46b2cb396b1009734cfb1e5d6adb663d284893b32/coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591", size = 243568, upload-time = "2025-10-15T15:12:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/68/30/8ba337c2877fe3f2e1af0ed7ff4be0c0c4aca44d6f4007040f3ca2255e99/coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088", size = 244665, upload-time = "2025-10-15T15:12:30.297Z" }, + { url = "https://files.pythonhosted.org/packages/cc/fb/c6f1d6d9a665536b7dde2333346f0cc41dc6a60bd1ffc10cd5c33e7eb000/coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f", size = 242681, upload-time = "2025-10-15T15:12:32.326Z" }, + { url = "https://files.pythonhosted.org/packages/be/38/1b532319af5f991fa153c20373291dc65c2bf532af7dbcffdeef745c8f79/coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866", size = 242912, upload-time = "2025-10-15T15:12:34.079Z" }, + { url = "https://files.pythonhosted.org/packages/67/3d/f39331c60ef6050d2a861dc1b514fa78f85f792820b68e8c04196ad733d6/coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841", size = 243559, upload-time = "2025-10-15T15:12:35.809Z" }, + { url = "https://files.pythonhosted.org/packages/4b/55/cb7c9df9d0495036ce582a8a2958d50c23cd73f84a23284bc23bd4711a6f/coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf", size = 218266, upload-time = "2025-10-15T15:12:37.429Z" }, + { url = "https://files.pythonhosted.org/packages/68/a8/b79cb275fa7bd0208767f89d57a1b5f6ba830813875738599741b97c2e04/coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969", size = 219169, upload-time = "2025-10-15T15:12:39.25Z" }, + { url = "https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" }, + { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" }, + { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" }, + { url = "https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" }, + { url = "https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" }, + { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = "2025-10-15T15:12:48.904Z" }, + { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" }, + { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", size = 246490, upload-time = "2025-10-15T15:12:52.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" }, + { url = "https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" }, + { url = "https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" }, + { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" }, + { url = "https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" }, + { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" }, + { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" }, + { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" }, + { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" }, + { url = "https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" }, + { url 
= "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" }, + { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" }, + { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" }, + { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" }, + { url = "https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" }, + { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" }, + { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" }, + { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" }, + { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" }, + { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" }, + { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" }, + { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" }, + { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" }, + { url = "https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" }, + { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" }, + { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" }, + { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" }, + { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" }, + { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" }, + { url = "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = 
"2025-10-15T15:14:13.46Z" }, + { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" }, + { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" }, + { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" }, + { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" }, + { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" }, + { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = "2025-10-15T15:14:28.42Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size 
= 218793, upload-time = "2025-10-15T15:14:34.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" }, + { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" }, + { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" }, + { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" }, + { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" }, + { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = "2025-10-15T15:14:52.413Z" }, + { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" }, + { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = 
"sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" }, + { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = 
"sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jiter" +version = "0.11.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/68/0357982493a7b20925aece061f7fb7a2678e3b232f8d73a6edb7e5304443/jiter-0.11.1.tar.gz", hash = "sha256:849dcfc76481c0ea0099391235b7ca97d7279e0fa4c86005457ac7c88e8b76dc", size = 168385, upload-time = "2025-10-17T11:31:15.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/10/d099def5716452c8d5ffa527405373a44ddaf8e3c9d4f6de1e1344cffd90/jiter-0.11.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ed58841a491bbbf3f7c55a6b68fff568439ab73b2cce27ace0e169057b5851df", size = 310078, upload-time = "2025-10-17T11:28:36.186Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/b81d010b0031ffa96dfb590628562ac5f513ce56aa2ab451d29fb3fedeb9/jiter-0.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:499beb9b2d7e51d61095a8de39ebcab1d1778f2a74085f8305a969f6cee9f3e4", size = 317138, upload-time = "2025-10-17T11:28:38.294Z" }, + { url = "https://files.pythonhosted.org/packages/89/12/31ea12af9d79671cc7bd893bf0ccaf3467624c0fc7146a0cbfe7b549bcfa/jiter-0.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b87b2821795e28cc990939b68ce7a038edea680a24910bd68a79d54ff3f03c02", size = 348964, upload-time = "2025-10-17T11:28:40.103Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/95cb6dc5ff962410667a29708c7a6c0691cc3c4866a0bfa79d085b56ebd6/jiter-0.11.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83f6fa494d8bba14ab100417c80e70d32d737e805cb85be2052d771c76fcd1f8", size = 363289, upload-time = "2025-10-17T11:28:41.49Z" }, + { url = "https://files.pythonhosted.org/packages/b8/3e/37006ad5843a0bc3a3ec3a6c44710d7a154113befaf5f26d2fe190668b63/jiter-0.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fbc6aea1daa2ec6f5ed465f0c5e7b0607175062ceebbea5ca70dd5ddab58083", size = 487243, upload-time = "2025-10-17T11:28:43.209Z" }, + { url = "https://files.pythonhosted.org/packages/80/5c/d38c8c801a322a0c0de47b9618c16fd766366f087ce37c4e55ae8e3c8b03/jiter-0.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:302288e2edc43174bb2db838e94688d724f9aad26c5fb9a74f7a5fb427452a6a", size = 376139, upload-time = "2025-10-17T11:28:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/b0/cd/442ad2389a5570b0ee673f93e14bbe8cdecd3e08a9ba7756081d84065e4c/jiter-0.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85db563fe3b367bb568af5d29dea4d4066d923b8e01f3417d25ebecd958de815", size = 359279, upload-time = "2025-10-17T11:28:46.152Z" }, + { url = "https://files.pythonhosted.org/packages/9a/35/8f5810d0e7d00bc395889085dbc1ccc36d454b56f28b2a5359dfd1bab48d/jiter-0.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1c1ba2b6b22f775444ef53bc2d5778396d3520abc7b2e1da8eb0c27cb3ffb10", size = 384911, 
upload-time = "2025-10-17T11:28:48.03Z" }, + { url = "https://files.pythonhosted.org/packages/3c/bd/8c069ceb0bafcf6b4aa5de0c27f02faf50468df39564a02e1a12389ad6c2/jiter-0.11.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:523be464b14f8fd0cc78da6964b87b5515a056427a2579f9085ce30197a1b54a", size = 517879, upload-time = "2025-10-17T11:28:49.902Z" }, + { url = "https://files.pythonhosted.org/packages/bc/3c/9163efcf762f79f47433078b4f0a1bddc56096082c02c6cae2f47f07f56f/jiter-0.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25b99b3f04cd2a38fefb22e822e35eb203a2cd37d680dbbc0c0ba966918af336", size = 508739, upload-time = "2025-10-17T11:28:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/44/07/50690f257935845d3114b95b5dd03749eeaab5e395cbb522f9e957da4551/jiter-0.11.1-cp310-cp310-win32.whl", hash = "sha256:47a79e90545a596bb9104109777894033347b11180d4751a216afef14072dbe7", size = 203948, upload-time = "2025-10-17T11:28:54.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3a/5964a944bf2e98ffd566153fdc2a6a368fcb11b58cc46832ca8c75808dba/jiter-0.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:cace75621ae9bd66878bf69fbd4dfc1a28ef8661e0c2d0eb72d3d6f1268eddf5", size = 207522, upload-time = "2025-10-17T11:28:56.79Z" }, + { url = "https://files.pythonhosted.org/packages/8b/34/c9e6cfe876f9a24f43ed53fe29f052ce02bd8d5f5a387dbf46ad3764bef0/jiter-0.11.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b0088ff3c374ce8ce0168523ec8e97122ebb788f950cf7bb8e39c7dc6a876a2", size = 310160, upload-time = "2025-10-17T11:28:59.174Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/b06ec8181d7165858faf2ac5287c54fe52b2287760b7fe1ba9c06890255f/jiter-0.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74433962dd3c3090655e02e461267095d6c84f0741c7827de11022ef8d7ff661", size = 316573, upload-time = "2025-10-17T11:29:00.905Z" }, + { url = "https://files.pythonhosted.org/packages/66/49/3179d93090f2ed0c6b091a9c210f266d2d020d82c96f753260af536371d0/jiter-0.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d98030e345e6546df2cc2c08309c502466c66c4747b043f1a0d415fada862b8", size = 348998, upload-time = "2025-10-17T11:29:02.321Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9d/63db2c8eabda7a9cad65a2e808ca34aaa8689d98d498f5a2357d7a2e2cec/jiter-0.11.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d6db0b2e788db46bec2cf729a88b6dd36959af2abd9fa2312dfba5acdd96dcb", size = 363413, upload-time = "2025-10-17T11:29:03.787Z" }, + { url = "https://files.pythonhosted.org/packages/25/ff/3e6b3170c5053053c7baddb8d44e2bf11ff44cd71024a280a8438ae6ba32/jiter-0.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55678fbbda261eafe7289165dd2ddd0e922df5f9a1ae46d7c79a5a15242bd7d1", size = 487144, upload-time = "2025-10-17T11:29:05.37Z" }, + { url = "https://files.pythonhosted.org/packages/b0/50/b63fcadf699893269b997f4c2e88400bc68f085c6db698c6e5e69d63b2c1/jiter-0.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a6b74fae8e40497653b52ce6ca0f1b13457af769af6fb9c1113efc8b5b4d9be", size = 376215, upload-time = "2025-10-17T11:29:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/39/8c/57a8a89401134167e87e73471b9cca321cf651c1fd78c45f3a0f16932213/jiter-0.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a55a453f8b035eb4f7852a79a065d616b7971a17f5e37a9296b4b38d3b619e4", size = 359163, upload-time = "2025-10-17T11:29:09.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/96/30b0cdbffbb6f753e25339d3dbbe26890c9ef119928314578201c758aace/jiter-0.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2638148099022e6bdb3f42904289cd2e403609356fb06eb36ddec2d50958bc29", size = 385344, upload-time = "2025-10-17T11:29:10.69Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d5/31dae27c1cc9410ad52bb514f11bfa4f286f7d6ef9d287b98b8831e156ec/jiter-0.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:252490567a5d990986f83b95a5f1ca1bf205ebd27b3e9e93bb7c2592380e29b9", size = 517972, upload-time = "2025-10-17T11:29:12.174Z" }, + { url = "https://files.pythonhosted.org/packages/61/1e/5905a7a3aceab80de13ab226fd690471a5e1ee7e554dc1015e55f1a6b896/jiter-0.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d431d52b0ca2436eea6195f0f48528202100c7deda354cb7aac0a302167594d5", size = 508408, upload-time = "2025-10-17T11:29:13.597Z" }, + { url = "https://files.pythonhosted.org/packages/91/12/1c49b97aa49077e136e8591cef7162f0d3e2860ae457a2d35868fd1521ef/jiter-0.11.1-cp311-cp311-win32.whl", hash = "sha256:db6f41e40f8bae20c86cb574b48c4fd9f28ee1c71cb044e9ec12e78ab757ba3a", size = 203937, upload-time = "2025-10-17T11:29:14.894Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9d/2255f7c17134ee9892c7e013c32d5bcf4bce64eb115402c9fe5e727a67eb/jiter-0.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0cc407b8e6cdff01b06bb80f61225c8b090c3df108ebade5e0c3c10993735b19", size = 207589, upload-time = "2025-10-17T11:29:16.166Z" }, + { url = "https://files.pythonhosted.org/packages/3c/28/6307fc8f95afef84cae6caf5429fee58ef16a582c2ff4db317ceb3e352fa/jiter-0.11.1-cp311-cp311-win_arm64.whl", hash = "sha256:fe04ea475392a91896d1936367854d346724a1045a247e5d1c196410473b8869", size = 188391, upload-time = "2025-10-17T11:29:17.488Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/318e8af2c904a9d29af91f78c1e18f0592e189bbdb8a462902d31fe20682/jiter-0.11.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c92148eec91052538ce6823dfca9525f5cfc8b622d7f07e9891a280f61b8c96c", size = 305655, upload-time = "2025-10-17T11:29:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/f7/29/6c7de6b5d6e511d9e736312c0c9bfcee8f9b6bef68182a08b1d78767e627/jiter-0.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ecd4da91b5415f183a6be8f7158d127bdd9e6a3174138293c0d48d6ea2f2009d", size = 315645, upload-time = "2025-10-17T11:29:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5f/ef9e5675511ee0eb7f98dd8c90509e1f7743dbb7c350071acae87b0145f3/jiter-0.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7e3ac25c00b9275684d47aa42febaa90a9958e19fd1726c4ecf755fbe5e553b", size = 348003, upload-time = "2025-10-17T11:29:22.712Z" }, + { url = "https://files.pythonhosted.org/packages/56/1b/abe8c4021010b0a320d3c62682769b700fb66f92c6db02d1a1381b3db025/jiter-0.11.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d7305c0a841858f866cd459cd9303f73883fb5e097257f3d4a3920722c69d4", size = 365122, upload-time = "2025-10-17T11:29:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/2a/2d/4a18013939a4f24432f805fbd5a19893e64650b933edb057cd405275a538/jiter-0.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e86fa10e117dce22c547f31dd6d2a9a222707d54853d8de4e9a2279d2c97f239", size = 488360, upload-time = "2025-10-17T11:29:25.724Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/77/38124f5d02ac4131f0dfbcfd1a19a0fac305fa2c005bc4f9f0736914a1a4/jiter-0.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae5ef1d48aec7e01ee8420155d901bb1d192998fa811a65ebb82c043ee186711", size = 376884, upload-time = "2025-10-17T11:29:27.056Z" }, + { url = "https://files.pythonhosted.org/packages/7b/43/59fdc2f6267959b71dd23ce0bd8d4aeaf55566aa435a5d00f53d53c7eb24/jiter-0.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb68e7bf65c990531ad8715e57d50195daf7c8e6f1509e617b4e692af1108939", size = 358827, upload-time = "2025-10-17T11:29:28.698Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/b3cc20ff5340775ea3bbaa0d665518eddecd4266ba7244c9cb480c0c82ec/jiter-0.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43b30c8154ded5845fa454ef954ee67bfccce629b2dea7d01f795b42bc2bda54", size = 385171, upload-time = "2025-10-17T11:29:30.078Z" }, + { url = "https://files.pythonhosted.org/packages/d2/bc/94dd1f3a61f4dc236f787a097360ec061ceeebebf4ea120b924d91391b10/jiter-0.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:586cafbd9dd1f3ce6a22b4a085eaa6be578e47ba9b18e198d4333e598a91db2d", size = 518359, upload-time = "2025-10-17T11:29:31.464Z" }, + { url = "https://files.pythonhosted.org/packages/7e/8c/12ee132bd67e25c75f542c227f5762491b9a316b0dad8e929c95076f773c/jiter-0.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:677cc2517d437a83bb30019fd4cf7cad74b465914c56ecac3440d597ac135250", size = 509205, upload-time = "2025-10-17T11:29:32.895Z" }, + { url = "https://files.pythonhosted.org/packages/39/d5/9de848928ce341d463c7e7273fce90ea6d0ea4343cd761f451860fa16b59/jiter-0.11.1-cp312-cp312-win32.whl", hash = "sha256:fa992af648fcee2b850a3286a35f62bbbaeddbb6dbda19a00d8fbc846a947b6e", size = 205448, upload-time = "2025-10-17T11:29:34.217Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/8002d78637e05009f5e3fb5288f9d57d65715c33b5d6aa20fd57670feef5/jiter-0.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:88b5cae9fa51efeb3d4bd4e52bfd4c85ccc9cac44282e2a9640893a042ba4d87", size = 204285, upload-time = "2025-10-17T11:29:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a2/bb24d5587e4dff17ff796716542f663deee337358006a80c8af43ddc11e5/jiter-0.11.1-cp312-cp312-win_arm64.whl", hash = "sha256:9a6cae1ab335551917f882f2c3c1efe7617b71b4c02381e4382a8fc80a02588c", size = 188712, upload-time = "2025-10-17T11:29:37.027Z" }, + { url = "https://files.pythonhosted.org/packages/7c/4b/e4dd3c76424fad02a601d570f4f2a8438daea47ba081201a721a903d3f4c/jiter-0.11.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:71b6a920a5550f057d49d0e8bcc60945a8da998019e83f01adf110e226267663", size = 305272, upload-time = "2025-10-17T11:29:39.249Z" }, + { url = "https://files.pythonhosted.org/packages/67/83/2cd3ad5364191130f4de80eacc907f693723beaab11a46c7d155b07a092c/jiter-0.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b3de72e925388453a5171be83379549300db01284f04d2a6f244d1d8de36f94", size = 314038, upload-time = "2025-10-17T11:29:40.563Z" }, + { url = "https://files.pythonhosted.org/packages/d3/3c/8e67d9ba524e97d2f04c8f406f8769a23205026b13b0938d16646d6e2d3e/jiter-0.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc19dd65a2bd3d9c044c5b4ebf657ca1e6003a97c0fc10f555aa4f7fb9821c00", size = 345977, upload-time = "2025-10-17T11:29:42.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/a5/489ce64d992c29bccbffabb13961bbb0435e890d7f2d266d1f3df5e917d2/jiter-0.11.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d58faaa936743cd1464540562f60b7ce4fd927e695e8bc31b3da5b914baa9abd", size = 364503, upload-time = "2025-10-17T11:29:43.459Z" }, + { url = "https://files.pythonhosted.org/packages/d4/c0/e321dd83ee231d05c8fe4b1a12caf1f0e8c7a949bf4724d58397104f10f2/jiter-0.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:902640c3103625317291cb73773413b4d71847cdf9383ba65528745ff89f1d14", size = 487092, upload-time = "2025-10-17T11:29:44.835Z" }, + { url = "https://files.pythonhosted.org/packages/f9/5e/8f24ec49c8d37bd37f34ec0112e0b1a3b4b5a7b456c8efff1df5e189ad43/jiter-0.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30405f726e4c2ed487b176c09f8b877a957f535d60c1bf194abb8dadedb5836f", size = 376328, upload-time = "2025-10-17T11:29:46.175Z" }, + { url = "https://files.pythonhosted.org/packages/7f/70/ded107620e809327cf7050727e17ccfa79d6385a771b7fe38fb31318ef00/jiter-0.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3217f61728b0baadd2551844870f65219ac4a1285d5e1a4abddff3d51fdabe96", size = 356632, upload-time = "2025-10-17T11:29:47.454Z" }, + { url = "https://files.pythonhosted.org/packages/19/53/c26f7251613f6a9079275ee43c89b8a973a95ff27532c421abc2a87afb04/jiter-0.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1364cc90c03a8196f35f396f84029f12abe925415049204446db86598c8b72c", size = 384358, upload-time = "2025-10-17T11:29:49.377Z" }, + { url = "https://files.pythonhosted.org/packages/84/16/e0f2cc61e9c4d0b62f6c1bd9b9781d878a427656f88293e2a5335fa8ff07/jiter-0.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:53a54bf8e873820ab186b2dca9f6c3303f00d65ae5e7b7d6bda1b95aa472d646", size = 517279, upload-time = "2025-10-17T11:29:50.968Z" }, + { url = "https://files.pythonhosted.org/packages/60/5c/4cd095eaee68961bca3081acbe7c89e12ae24a5dae5fd5d2a13e01ed2542/jiter-0.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7e29aca023627b0e0c2392d4248f6414d566ff3974fa08ff2ac8dbb96dfee92a", size = 508276, upload-time = "2025-10-17T11:29:52.619Z" }, + { url = "https://files.pythonhosted.org/packages/4f/25/f459240e69b0e09a7706d96ce203ad615ca36b0fe832308d2b7123abf2d0/jiter-0.11.1-cp313-cp313-win32.whl", hash = "sha256:f153e31d8bca11363751e875c0a70b3d25160ecbaee7b51e457f14498fb39d8b", size = 205593, upload-time = "2025-10-17T11:29:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/461bafe22bae79bab74e217a09c907481a46d520c36b7b9fe71ee8c9e983/jiter-0.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:f773f84080b667c69c4ea0403fc67bb08b07e2b7ce1ef335dea5868451e60fed", size = 203518, upload-time = "2025-10-17T11:29:55.216Z" }, + { url = "https://files.pythonhosted.org/packages/7b/72/c45de6e320edb4fa165b7b1a414193b3cae302dd82da2169d315dcc78b44/jiter-0.11.1-cp313-cp313-win_arm64.whl", hash = "sha256:635ecd45c04e4c340d2187bcb1cea204c7cc9d32c1364d251564bf42e0e39c2d", size = 188062, upload-time = "2025-10-17T11:29:56.631Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/4a57922437ca8753ef823f434c2dec5028b237d84fa320f06a3ba1aec6e8/jiter-0.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d892b184da4d94d94ddb4031296931c74ec8b325513a541ebfd6dfb9ae89904b", size = 313814, upload-time = "2025-10-17T11:29:58.509Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/50/62a0683dadca25490a4bedc6a88d59de9af2a3406dd5a576009a73a1d392/jiter-0.11.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa22c223a3041dacb2fcd37c70dfd648b44662b4a48e242592f95bda5ab09d58", size = 344987, upload-time = "2025-10-17T11:30:00.208Z" }, + { url = "https://files.pythonhosted.org/packages/da/00/2355dbfcbf6cdeaddfdca18287f0f38ae49446bb6378e4a5971e9356fc8a/jiter-0.11.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:330e8e6a11ad4980cd66a0f4a3e0e2e0f646c911ce047014f984841924729789", size = 356399, upload-time = "2025-10-17T11:30:02.084Z" }, + { url = "https://files.pythonhosted.org/packages/c9/07/c2bd748d578fa933d894a55bff33f983bc27f75fc4e491b354bef7b78012/jiter-0.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:09e2e386ebf298547ca3a3704b729471f7ec666c2906c5c26c1a915ea24741ec", size = 203289, upload-time = "2025-10-17T11:30:03.656Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ee/ace64a853a1acbd318eb0ca167bad1cf5ee037207504b83a868a5849747b/jiter-0.11.1-cp313-cp313t-win_arm64.whl", hash = "sha256:fe4a431c291157e11cee7c34627990ea75e8d153894365a3bc84b7a959d23ca8", size = 188284, upload-time = "2025-10-17T11:30:05.046Z" }, + { url = "https://files.pythonhosted.org/packages/8d/00/d6006d069e7b076e4c66af90656b63da9481954f290d5eca8c715f4bf125/jiter-0.11.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0fa1f70da7a8a9713ff8e5f75ec3f90c0c870be6d526aa95e7c906f6a1c8c676", size = 304624, upload-time = "2025-10-17T11:30:06.678Z" }, + { url = "https://files.pythonhosted.org/packages/fc/45/4a0e31eb996b9ccfddbae4d3017b46f358a599ccf2e19fbffa5e531bd304/jiter-0.11.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:569ee559e5046a42feb6828c55307cf20fe43308e3ae0d8e9e4f8d8634d99944", size = 315042, upload-time = "2025-10-17T11:30:08.87Z" }, + { url = "https://files.pythonhosted.org/packages/e7/91/22f5746f5159a28c76acdc0778801f3c1181799aab196dbea2d29e064968/jiter-0.11.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f69955fa1d92e81987f092b233f0be49d4c937da107b7f7dcf56306f1d3fcce9", size = 346357, upload-time = "2025-10-17T11:30:10.222Z" }, + { url = "https://files.pythonhosted.org/packages/f5/4f/57620857d4e1dc75c8ff4856c90cb6c135e61bff9b4ebfb5dc86814e82d7/jiter-0.11.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:090f4c9d4a825e0fcbd0a2647c9a88a0f366b75654d982d95a9590745ff0c48d", size = 365057, upload-time = "2025-10-17T11:30:11.585Z" }, + { url = "https://files.pythonhosted.org/packages/ce/34/caf7f9cc8ae0a5bb25a5440cc76c7452d264d1b36701b90fdadd28fe08ec/jiter-0.11.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf3d8cedf9e9d825233e0dcac28ff15c47b7c5512fdfe2e25fd5bbb6e6b0cee", size = 487086, upload-time = "2025-10-17T11:30:13.052Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/85b5857c329d533d433fedf98804ebec696004a1f88cabad202b2ddc55cf/jiter-0.11.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2aa9b1958f9c30d3d1a558b75f0626733c60eb9b7774a86b34d88060be1e67fe", size = 376083, upload-time = "2025-10-17T11:30:14.416Z" }, + { url = "https://files.pythonhosted.org/packages/85/d3/2d9f973f828226e6faebdef034097a2918077ea776fb4d88489949024787/jiter-0.11.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42d1ca16590b768c5e7d723055acd2633908baacb3628dd430842e2e035aa90", size = 357825, upload-time = "2025-10-17T11:30:15.765Z" }, + { url 
= "https://files.pythonhosted.org/packages/f4/55/848d4dabf2c2c236a05468c315c2cb9dc736c5915e65449ccecdba22fb6f/jiter-0.11.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5db4c2486a023820b701a17aec9c5a6173c5ba4393f26662f032f2de9c848b0f", size = 383933, upload-time = "2025-10-17T11:30:17.34Z" }, + { url = "https://files.pythonhosted.org/packages/0b/6c/204c95a4fbb0e26dfa7776c8ef4a878d0c0b215868011cc904bf44f707e2/jiter-0.11.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4573b78777ccfac954859a6eff45cbd9d281d80c8af049d0f1a3d9fc323d5c3a", size = 517118, upload-time = "2025-10-17T11:30:18.684Z" }, + { url = "https://files.pythonhosted.org/packages/88/25/09956644ea5a2b1e7a2a0f665cb69a973b28f4621fa61fc0c0f06ff40a31/jiter-0.11.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7593ac6f40831d7961cb67633c39b9fef6689a211d7919e958f45710504f52d3", size = 508194, upload-time = "2025-10-17T11:30:20.719Z" }, + { url = "https://files.pythonhosted.org/packages/09/49/4d1657355d7f5c9e783083a03a3f07d5858efa6916a7d9634d07db1c23bd/jiter-0.11.1-cp314-cp314-win32.whl", hash = "sha256:87202ec6ff9626ff5f9351507def98fcf0df60e9a146308e8ab221432228f4ea", size = 203961, upload-time = "2025-10-17T11:30:22.073Z" }, + { url = "https://files.pythonhosted.org/packages/76/bd/f063bd5cc2712e7ca3cf6beda50894418fc0cfeb3f6ff45a12d87af25996/jiter-0.11.1-cp314-cp314-win_amd64.whl", hash = "sha256:a5dd268f6531a182c89d0dd9a3f8848e86e92dfff4201b77a18e6b98aa59798c", size = 202804, upload-time = "2025-10-17T11:30:23.452Z" }, + { url = "https://files.pythonhosted.org/packages/52/ca/4d84193dfafef1020bf0bedd5e1a8d0e89cb67c54b8519040effc694964b/jiter-0.11.1-cp314-cp314-win_arm64.whl", hash = "sha256:5d761f863f912a44748a21b5c4979c04252588ded8d1d2760976d2e42cd8d991", size = 188001, upload-time = "2025-10-17T11:30:24.915Z" }, + { url = "https://files.pythonhosted.org/packages/d5/fa/3b05e5c9d32efc770a8510eeb0b071c42ae93a5b576fd91cee9af91689a1/jiter-0.11.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2cc5a3965285ddc33e0cab933e96b640bc9ba5940cea27ebbbf6695e72d6511c", size = 312561, upload-time = "2025-10-17T11:30:26.742Z" }, + { url = "https://files.pythonhosted.org/packages/50/d3/335822eb216154ddb79a130cbdce88fdf5c3e2b43dc5dba1fd95c485aaf5/jiter-0.11.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b572b3636a784c2768b2342f36a23078c8d3aa6d8a30745398b1bab58a6f1a8", size = 344551, upload-time = "2025-10-17T11:30:28.252Z" }, + { url = "https://files.pythonhosted.org/packages/31/6d/a0bed13676b1398f9b3ba61f32569f20a3ff270291161100956a577b2dd3/jiter-0.11.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad93e3d67a981f96596d65d2298fe8d1aa649deb5374a2fb6a434410ee11915e", size = 363051, upload-time = "2025-10-17T11:30:30.009Z" }, + { url = "https://files.pythonhosted.org/packages/a4/03/313eda04aa08545a5a04ed5876e52f49ab76a4d98e54578896ca3e16313e/jiter-0.11.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a83097ce379e202dcc3fe3fc71a16d523d1ee9192c8e4e854158f96b3efe3f2f", size = 485897, upload-time = "2025-10-17T11:30:31.429Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/a1011b9d325e40b53b1b96a17c010b8646013417f3902f97a86325b19299/jiter-0.11.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7042c51e7fbeca65631eb0c332f90c0c082eab04334e7ccc28a8588e8e2804d9", size = 375224, upload-time = "2025-10-17T11:30:33.18Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/da/1b45026b19dd39b419e917165ff0ea629dbb95f374a3a13d2df95e40a6ac/jiter-0.11.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a68d679c0e47649a61df591660507608adc2652442de7ec8276538ac46abe08", size = 356606, upload-time = "2025-10-17T11:30:34.572Z" }, + { url = "https://files.pythonhosted.org/packages/7a/0c/9acb0e54d6a8ba59ce923a180ebe824b4e00e80e56cefde86cc8e0a948be/jiter-0.11.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1b0da75dbf4b6ec0b3c9e604d1ee8beaf15bc046fff7180f7d89e3cdbd3bb51", size = 384003, upload-time = "2025-10-17T11:30:35.987Z" }, + { url = "https://files.pythonhosted.org/packages/3f/2b/e5a5fe09d6da2145e4eed651e2ce37f3c0cf8016e48b1d302e21fb1628b7/jiter-0.11.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:69dd514bf0fa31c62147d6002e5ca2b3e7ef5894f5ac6f0a19752385f4e89437", size = 516946, upload-time = "2025-10-17T11:30:37.425Z" }, + { url = "https://files.pythonhosted.org/packages/5f/fe/db936e16e0228d48eb81f9934e8327e9fde5185e84f02174fcd22a01be87/jiter-0.11.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:bb31ac0b339efa24c0ca606febd8b77ef11c58d09af1b5f2be4c99e907b11111", size = 507614, upload-time = "2025-10-17T11:30:38.977Z" }, + { url = "https://files.pythonhosted.org/packages/86/db/c4438e8febfb303486d13c6b72f5eb71cf851e300a0c1f0b4140018dd31f/jiter-0.11.1-cp314-cp314t-win32.whl", hash = "sha256:b2ce0d6156a1d3ad41da3eec63b17e03e296b78b0e0da660876fccfada86d2f7", size = 204043, upload-time = "2025-10-17T11:30:40.308Z" }, + { url = "https://files.pythonhosted.org/packages/36/59/81badb169212f30f47f817dfaabf965bc9b8204fed906fab58104ee541f9/jiter-0.11.1-cp314-cp314t-win_amd64.whl", hash = "sha256:f4db07d127b54c4a2d43b4cf05ff0193e4f73e0dd90c74037e16df0b29f666e1", size = 204046, upload-time = "2025-10-17T11:30:41.692Z" }, + { url = "https://files.pythonhosted.org/packages/dd/01/43f7b4eb61db3e565574c4c5714685d042fb652f9eef7e5a3de6aafa943a/jiter-0.11.1-cp314-cp314t-win_arm64.whl", hash = "sha256:28e4fdf2d7ebfc935523e50d1efa3970043cfaa161674fe66f9642409d001dfe", size = 188069, upload-time = "2025-10-17T11:30:43.23Z" }, + { url = "https://files.pythonhosted.org/packages/9d/51/bd41562dd284e2a18b6dc0a99d195fd4a3560d52ab192c42e56fe0316643/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:e642b5270e61dd02265866398707f90e365b5db2eb65a4f30c789d826682e1f6", size = 306871, upload-time = "2025-10-17T11:31:03.616Z" }, + { url = "https://files.pythonhosted.org/packages/ba/cb/64e7f21dd357e8cd6b3c919c26fac7fc198385bbd1d85bb3b5355600d787/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:464ba6d000585e4e2fd1e891f31f1231f497273414f5019e27c00a4b8f7a24ad", size = 301454, upload-time = "2025-10-17T11:31:05.338Z" }, + { url = "https://files.pythonhosted.org/packages/55/b0/54bdc00da4ef39801b1419a01035bd8857983de984fd3776b0be6b94add7/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:055568693ab35e0bf3a171b03bb40b2dcb10352359e0ab9b5ed0da2bf1eb6f6f", size = 336801, upload-time = "2025-10-17T11:31:06.893Z" }, + { url = "https://files.pythonhosted.org/packages/de/8f/87176ed071d42e9db415ed8be787ef4ef31a4fa27f52e6a4fbf34387bd28/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c69ea798d08a915ba4478113efa9e694971e410056392f4526d796f136d3fa", size = 343452, upload-time = "2025-10-17T11:31:08.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/bc/950dd7f170c6394b6fdd73f989d9e729bd98907bcc4430ef080a72d06b77/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:0d4d6993edc83cf75e8c6828a8d6ce40a09ee87e38c7bfba6924f39e1337e21d", size = 302626, upload-time = "2025-10-17T11:31:09.645Z" }, + { url = "https://files.pythonhosted.org/packages/3a/65/43d7971ca82ee100b7b9b520573eeef7eabc0a45d490168ebb9a9b5bb8b2/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f78d151c83a87a6cf5461d5ee55bc730dd9ae227377ac6f115b922989b95f838", size = 297034, upload-time = "2025-10-17T11:31:10.975Z" }, + { url = "https://files.pythonhosted.org/packages/19/4c/000e1e0c0c67e96557a279f8969487ea2732d6c7311698819f977abae837/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9022974781155cd5521d5cb10997a03ee5e31e8454c9d999dcdccd253f2353f", size = 337328, upload-time = "2025-10-17T11:31:12.399Z" }, + { url = "https://files.pythonhosted.org/packages/d9/71/71408b02c6133153336d29fa3ba53000f1e1a3f78bb2fc2d1a1865d2e743/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18c77aaa9117510d5bdc6a946baf21b1f0cfa58ef04d31c8d016f206f2118960", size = 343697, upload-time = "2025-10-17T11:31:13.773Z" }, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonpointer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/78/18813351fe5d63acad16aec57f94ec2b70a09e53ca98145589e185423873/jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c", size = 21699, upload-time = "2023-06-26T12:07:29.144Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade", size = 12898, upload-time = "2023-06-16T21:01:28.466Z" }, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, +] + +[[package]] +name = "langchain" +version = "1.0.3" +source = { editable = "../langchain_v1" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph" }, + { name = "pydantic" }, +] + +[package.optional-dependencies] +openai = [ + { name = "langchain-openai" }, +] + +[package.metadata] +requires-dist = [ + { name = "langchain-anthropic", marker = "extra == 'anthropic'" }, + { name = "langchain-aws", marker = "extra == 'aws'" }, + { name = "langchain-azure-ai", marker = "extra == 'azure-ai'" }, + { name = "langchain-community", marker = "extra == 'community'" }, + { name = "langchain-core", editable = "../core" }, + { name = "langchain-deepseek", marker = "extra == 'deepseek'" }, + { name = 
"langchain-fireworks", marker = "extra == 'fireworks'" }, + { name = "langchain-google-genai", marker = "extra == 'google-genai'" }, + { name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" }, + { name = "langchain-groq", marker = "extra == 'groq'" }, + { name = "langchain-huggingface", marker = "extra == 'huggingface'" }, + { name = "langchain-mistralai", marker = "extra == 'mistralai'" }, + { name = "langchain-model-profiles", marker = "extra == 'model-profiles'" }, + { name = "langchain-ollama", marker = "extra == 'ollama'" }, + { name = "langchain-openai", marker = "extra == 'openai'", editable = "../partners/openai" }, + { name = "langchain-perplexity", marker = "extra == 'perplexity'" }, + { name = "langchain-together", marker = "extra == 'together'" }, + { name = "langchain-xai", marker = "extra == 'xai'" }, + { name = "langgraph", specifier = ">=1.0.2,<1.1.0" }, + { name = "pydantic", specifier = ">=2.7.4,<3.0.0" }, +] +provides-extras = ["model-profiles", "community", "anthropic", "openai", "azure-ai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"] + +[package.metadata.requires-dev] +lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13.0" }] +test = [ + { name = "langchain-openai", editable = "../partners/openai" }, + { name = "langchain-tests", editable = "../standard-tests" }, + { name = "pytest", specifier = ">=8.0.0,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.2,<2.0.0" }, + { name = "pytest-cov", specifier = ">=4.0.0,<8.0.0" }, + { name = "pytest-mock" }, + { name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" }, + { name = "pytest-watcher", specifier = ">=0.2.6,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" }, + { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, + { name = "toml", specifier = ">=0.10.2,<1.0.0" }, +] +test-integration = [ + { name = "cassio", specifier = ">=0.1.0,<1.0.0" }, + { name = "langchain-core", editable = "../core" }, + { name = "langchain-text-splitters", editable = "../text-splitters" }, + { name = "langchainhub", specifier = ">=0.1.16,<1.0.0" }, + { name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" }, + { name = "vcrpy", specifier = ">=7.0.0,<8.0.0" }, + { name = "wrapt", specifier = ">=1.15.0,<2.0.0" }, +] +typing = [ + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, + { name = "types-toml", specifier = ">=0.10.8.20240310,<1.0.0.0" }, +] + +[[package]] +name = "langchain-core" +version = "1.0.3" +source = { editable = "../core" } +dependencies = [ + { name = "jsonpatch" }, + { name = "langsmith" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "tenacity" }, + { name = "typing-extensions" }, +] + +[package.metadata] +requires-dist = [ + { name = "jsonpatch", specifier = ">=1.33.0,<2.0.0" }, + { name = "langsmith", specifier = ">=0.3.45,<1.0.0" }, + { name = "packaging", specifier = ">=23.2.0,<26.0.0" }, + { name = "pydantic", specifier = ">=2.7.4,<3.0.0" }, + { name = "pyyaml", specifier = ">=5.3.0,<7.0.0" }, + { name = "tenacity", specifier = ">=8.1.0,!=8.4.0,<10.0.0" }, + { name = "typing-extensions", specifier = ">=4.7.0,<5.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "grandalf", specifier = ">=0.8.0,<1.0.0" }, + { name = "jupyter", specifier = ">=1.0.0,<2.0.0" }, + { name = "setuptools", specifier = ">=67.6.1,<68.0.0" }, +] +lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }] +test = [ + { name = "blockbuster", 
specifier = ">=1.5.18,<1.6.0" }, + { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, + { name = "grandalf", specifier = ">=0.8.0,<1.0.0" }, + { name = "langchain-model-profiles", directory = "." }, + { name = "langchain-tests", directory = "../standard-tests" }, + { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, + { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, + { name = "pytest", specifier = ">=8.0.0,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" }, + { name = "pytest-benchmark" }, + { name = "pytest-codspeed" }, + { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" }, + { name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" }, + { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" }, + { name = "responses", specifier = ">=0.25.0,<1.0.0" }, + { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, +] +test-integration = [] +typing = [ + { name = "langchain-model-profiles", directory = "." }, + { name = "langchain-text-splitters", directory = "../text-splitters" }, + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, + { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, + { name = "types-requests", specifier = ">=2.28.11.5,<3.0.0.0" }, +] + +[[package]] +name = "langchain-model-profiles" +version = "0.0.3" +source = { editable = "." } +dependencies = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] + +[package.dev-dependencies] +dev = [ + { name = "httpx" }, +] +lint = [ + { name = "langchain" }, + { name = "ruff" }, +] +test = [ + { name = "langchain", extra = ["openai"] }, + { name = "langchain-core" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "pytest-socket" }, + { name = "pytest-watcher" }, + { name = "pytest-xdist" }, + { name = "syrupy" }, + { name = "toml" }, +] +test-integration = [ + { name = "langchain-core" }, +] +typing = [ + { name = "mypy" }, + { name = "types-toml" }, +] + +[package.metadata] +requires-dist = [ + { name = "tomli", marker = "python_full_version < '3.11'", specifier = ">=2.0.0,<3.0.0" }, + { name = "typing-extensions", specifier = ">=4.7.0,<5.0.0" }, +] + +[package.metadata.requires-dev] +dev = [{ name = "httpx", specifier = ">=0.23.0,<1" }] +lint = [ + { name = "langchain", editable = "../langchain_v1" }, + { name = "ruff", specifier = ">=0.12.2,<0.13.0" }, +] +test = [ + { name = "langchain", extras = ["openai"], editable = "../langchain_v1" }, + { name = "langchain-core", editable = "../core" }, + { name = "pytest", specifier = ">=8.0.0,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.2,<2.0.0" }, + { name = "pytest-cov", specifier = ">=4.0.0,<8.0.0" }, + { name = "pytest-mock" }, + { name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" }, + { name = "pytest-watcher", specifier = ">=0.2.6,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" }, + { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, + { name = "toml", specifier = ">=0.10.2,<1.0.0" }, +] +test-integration = [{ name = "langchain-core", editable = "../core" }] +typing = [ + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, + { name = "types-toml", specifier = ">=0.10.8.20240310,<1.0.0.0" }, +] + +[[package]] +name = "langchain-openai" +version = "1.0.2" +source = { editable = "../partners/openai" } +dependencies = [ + { name = "langchain-core" }, + { name = "openai" }, + { 
name = "tiktoken" }, +] + +[package.metadata] +requires-dist = [ + { name = "langchain-core", editable = "../core" }, + { name = "openai", specifier = ">=1.109.1,<3.0.0" }, + { name = "tiktoken", specifier = ">=0.7.0,<1.0.0" }, +] + +[package.metadata.requires-dev] +dev = [{ name = "langchain-core", editable = "../core" }] +lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }] +test = [ + { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, + { name = "langchain", editable = "../langchain_v1" }, + { name = "langchain-core", editable = "../core" }, + { name = "langchain-tests", editable = "../standard-tests" }, + { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, + { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, + { name = "pytest", specifier = ">=7.3.0,<8.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" }, + { name = "pytest-cov", specifier = ">=4.1.0,<5.0.0" }, + { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" }, + { name = "pytest-retry", specifier = ">=1.7.0,<1.8.0" }, + { name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" }, + { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" }, + { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, + { name = "vcrpy", specifier = ">=7.0.0,<8.0.0" }, +] +test-integration = [ + { name = "httpx", specifier = ">=0.27.0,<1.0.0" }, + { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, + { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, + { name = "pillow", specifier = ">=10.3.0,<12.0.0" }, +] +typing = [ + { name = "langchain-core", editable = "../core" }, + { name = "mypy", specifier = ">=1.17.1,<2.0.0" }, + { name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" }, +] + +[[package]] +name = "langgraph" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, + { name = "langgraph-prebuilt" }, + { name = "langgraph-sdk" }, + { name = "pydantic" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/25/18e6e056ee1a8af64fcab441b4a3f2e158399935b08f148c7718fc42ecdb/langgraph-1.0.2.tar.gz", hash = "sha256:dae1af08d6025cb1fcaed68f502c01af7d634d9044787c853a46c791cfc52f67", size = 482660, upload-time = "2025-10-29T18:38:28.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/b1/9f4912e13d4ed691f2685c8a4b764b5a9237a30cca0c5782bc213d9f0a9a/langgraph-1.0.2-py3-none-any.whl", hash = "sha256:b3d56b8c01de857b5fb1da107e8eab6e30512a377685eeedb4f76456724c9729", size = 156751, upload-time = "2025-10-29T18:38:26.577Z" }, +] + +[[package]] +name = "langgraph-checkpoint" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "ormsgpack" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/cb/2a6dad2f0a14317580cc122e2a60e7f0ecabb50aaa6dc5b7a6a2c94cead7/langgraph_checkpoint-3.0.0.tar.gz", hash = "sha256:f738695ad938878d8f4775d907d9629e9fcd345b1950196effb08f088c52369e", size = 132132, upload-time = "2025-10-20T18:35:49.132Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/2a/2efe0b5a72c41e3a936c81c5f5d8693987a1b260287ff1bbebaae1b7b888/langgraph_checkpoint-3.0.0-py3-none-any.whl", hash = "sha256:560beb83e629784ab689212a3d60834fb3196b4bbe1d6ac18e5cad5d85d46010", size = 46060, upload-time = 
"2025-10-20T18:35:48.255Z" }, +] + +[[package]] +name = "langgraph-prebuilt" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/2f/b940590436e07b3450fe6d791aad5e581363ad536c4f1771e3ba46530268/langgraph_prebuilt-1.0.2.tar.gz", hash = "sha256:9896dbabf04f086eb59df4294f54ab5bdb21cd78e27e0a10e695dffd1cc6097d", size = 142075, upload-time = "2025-10-29T18:29:00.401Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/2f/9a7d00d4afa036e65294059c7c912002fb72ba5dbbd5c2a871ca06360278/langgraph_prebuilt-1.0.2-py3-none-any.whl", hash = "sha256:d9499f7c449fb637ee7b87e3f6a3b74095f4202053c74d33894bd839ea4c57c7", size = 34286, upload-time = "2025-10-29T18:28:59.26Z" }, +] + +[[package]] +name = "langgraph-sdk" +version = "0.2.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/d8/40e01190a73c564a4744e29a6c902f78d34d43dad9b652a363a92a67059c/langgraph_sdk-0.2.9.tar.gz", hash = "sha256:b3bd04c6be4fa382996cd2be8fbc1e7cc94857d2bc6b6f4599a7f2a245975303", size = 99802, upload-time = "2025-09-20T18:49:14.734Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/05/b2d34e16638241e6f27a6946d28160d4b8b641383787646d41a3727e0896/langgraph_sdk-0.2.9-py3-none-any.whl", hash = "sha256:fbf302edadbf0fb343596f91c597794e936ef68eebc0d3e1d358b6f9f72a1429", size = 56752, upload-time = "2025-09-20T18:49:13.346Z" }, +] + +[[package]] +name = "langsmith" +version = "0.4.38" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson", marker = "platform_python_implementation != 'PyPy'" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "requests-toolbelt" }, + { name = "zstandard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/37/21/f1ba48412c64bf3bb8feb532fc9d247b396935b5d8242332d44a4195ec2d/langsmith-0.4.38.tar.gz", hash = "sha256:3aa57f9c16a5880256cd1eab0452533c1fb5ee14ec5250e23ed919cc2b07f6d3", size = 942789, upload-time = "2025-10-23T22:28:20.458Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/2b/7e0248f65e35800ea8e4e3dbb3bcc36c61b81f5b8abeddaceec8320ab491/langsmith-0.4.38-py3-none-any.whl", hash = "sha256:326232a24b1c6dd308a3188557cc023adf8fb14144263b2982c115a6be5141e7", size = 397341, upload-time = "2025-10-23T22:28:18.333Z" }, +] + +[[package]] +name = "mypy" +version = "1.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, + { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = 
"2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "openai" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = 
"distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/44/303deb97be7c1c9b53118b52825cbd1557aeeff510f3a52566b1fa66f6a2/openai-2.6.1.tar.gz", hash = "sha256:27ae704d190615fca0c0fc2b796a38f8b5879645a3a52c9c453b23f97141bb49", size = 593043, upload-time = "2025-10-24T13:29:52.79Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/0e/331df43df633e6105ff9cf45e0ce57762bd126a45ac16b25a43f6738d8a2/openai-2.6.1-py3-none-any.whl", hash = "sha256:904e4b5254a8416746a2f05649594fa41b19d799843cd134dac86167e094edef", size = 1005551, upload-time = "2025-10-24T13:29:50.973Z" }, +] + +[[package]] +name = "orjson" +version = "3.11.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/30/5aed63d5af1c8b02fbd2a8d83e2a6c8455e30504c50dbf08c8b51403d873/orjson-3.11.4-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e3aa2118a3ece0d25489cbe48498de8a5d580e42e8d9979f65bf47900a15aba1", size = 243870, upload-time = "2025-10-24T15:48:28.908Z" }, + { url = "https://files.pythonhosted.org/packages/44/1f/da46563c08bef33c41fd63c660abcd2184b4d2b950c8686317d03b9f5f0c/orjson-3.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a69ab657a4e6733133a3dca82768f2f8b884043714e8d2b9ba9f52b6efef5c44", size = 130622, upload-time = "2025-10-24T15:48:31.361Z" }, + { url = "https://files.pythonhosted.org/packages/02/bd/b551a05d0090eab0bf8008a13a14edc0f3c3e0236aa6f5b697760dd2817b/orjson-3.11.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3740bffd9816fc0326ddc406098a3a8f387e42223f5f455f2a02a9f834ead80c", size = 129344, upload-time = "2025-10-24T15:48:32.71Z" }, + { url = "https://files.pythonhosted.org/packages/87/6c/9ddd5e609f443b2548c5e7df3c44d0e86df2c68587a0e20c50018cdec535/orjson-3.11.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65fd2f5730b1bf7f350c6dc896173d3460d235c4be007af73986d7cd9a2acd23", size = 136633, upload-time = "2025-10-24T15:48:34.128Z" }, + { url = "https://files.pythonhosted.org/packages/95/f2/9f04f2874c625a9fb60f6918c33542320661255323c272e66f7dcce14df2/orjson-3.11.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fdc3ae730541086158d549c97852e2eea6820665d4faf0f41bf99df41bc11ea", size = 137695, upload-time = "2025-10-24T15:48:35.654Z" }, + { url = "https://files.pythonhosted.org/packages/d2/c2/c7302afcbdfe8a891baae0e2cee091583a30e6fa613e8bdf33b0e9c8a8c7/orjson-3.11.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e10b4d65901da88845516ce9f7f9736f9638d19a1d483b3883dc0182e6e5edba", size = 136879, upload-time = "2025-10-24T15:48:37.483Z" }, + { url = "https://files.pythonhosted.org/packages/c6/3a/b31c8f0182a3e27f48e703f46e61bb769666cd0dac4700a73912d07a1417/orjson-3.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6a03a678085f64b97f9d4a9ae69376ce91a3a9e9b56a82b1580d8e1d501aff", size = 136374, upload-time = "2025-10-24T15:48:38.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/d0/fd9ab96841b090d281c46df566b7f97bc6c8cd9aff3f3ebe99755895c406/orjson-3.11.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c82e4f0b1c712477317434761fbc28b044c838b6b1240d895607441412371ac", size = 140519, upload-time = "2025-10-24T15:48:39.756Z" }, + { url = "https://files.pythonhosted.org/packages/d6/ce/36eb0f15978bb88e33a3480e1a3fb891caa0f189ba61ce7713e0ccdadabf/orjson-3.11.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d58c166a18f44cc9e2bad03a327dc2d1a3d2e85b847133cfbafd6bfc6719bd79", size = 406522, upload-time = "2025-10-24T15:48:41.198Z" }, + { url = "https://files.pythonhosted.org/packages/85/11/e8af3161a288f5c6a00c188fc729c7ba193b0cbc07309a1a29c004347c30/orjson-3.11.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94f206766bf1ea30e1382e4890f763bd1eefddc580e08fec1ccdc20ddd95c827", size = 149790, upload-time = "2025-10-24T15:48:42.664Z" }, + { url = "https://files.pythonhosted.org/packages/ea/96/209d52db0cf1e10ed48d8c194841e383e23c2ced5a2ee766649fe0e32d02/orjson-3.11.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:41bf25fb39a34cf8edb4398818523277ee7096689db352036a9e8437f2f3ee6b", size = 140040, upload-time = "2025-10-24T15:48:44.042Z" }, + { url = "https://files.pythonhosted.org/packages/ef/0e/526db1395ccb74c3d59ac1660b9a325017096dc5643086b38f27662b4add/orjson-3.11.4-cp310-cp310-win32.whl", hash = "sha256:fa9627eba4e82f99ca6d29bc967f09aba446ee2b5a1ea728949ede73d313f5d3", size = 135955, upload-time = "2025-10-24T15:48:45.495Z" }, + { url = "https://files.pythonhosted.org/packages/e6/69/18a778c9de3702b19880e73c9866b91cc85f904b885d816ba1ab318b223c/orjson-3.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:23ef7abc7fca96632d8174ac115e668c1e931b8fe4dde586e92a500bf1914dcc", size = 131577, upload-time = "2025-10-24T15:48:46.609Z" }, + { url = "https://files.pythonhosted.org/packages/63/1d/1ea6005fffb56715fd48f632611e163d1604e8316a5bad2288bee9a1c9eb/orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39", size = 243498, upload-time = "2025-10-24T15:48:48.101Z" }, + { url = "https://files.pythonhosted.org/packages/37/d7/ffed10c7da677f2a9da307d491b9eb1d0125b0307019c4ad3d665fd31f4f/orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d", size = 128961, upload-time = "2025-10-24T15:48:49.571Z" }, + { url = "https://files.pythonhosted.org/packages/a2/96/3e4d10a18866d1368f73c8c44b7fe37cc8a15c32f2a7620be3877d4c55a3/orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175", size = 130321, upload-time = "2025-10-24T15:48:50.713Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1f/465f66e93f434f968dd74d5b623eb62c657bdba2332f5a8be9f118bb74c7/orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040", size = 129207, upload-time = "2025-10-24T15:48:52.193Z" }, + { url = "https://files.pythonhosted.org/packages/28/43/d1e94837543321c119dff277ae8e348562fe8c0fafbb648ef7cb0c67e521/orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63", size = 136323, upload-time = "2025-10-24T15:48:54.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/04/93303776c8890e422a5847dd012b4853cdd88206b8bbd3edc292c90102d1/orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9", size = 137440, upload-time = "2025-10-24T15:48:56.326Z" }, + { url = "https://files.pythonhosted.org/packages/1e/ef/75519d039e5ae6b0f34d0336854d55544ba903e21bf56c83adc51cd8bf82/orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a", size = 136680, upload-time = "2025-10-24T15:48:57.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/18/bf8581eaae0b941b44efe14fee7b7862c3382fbc9a0842132cfc7cf5ecf4/orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be", size = 136160, upload-time = "2025-10-24T15:48:59.631Z" }, + { url = "https://files.pythonhosted.org/packages/c4/35/a6d582766d351f87fc0a22ad740a641b0a8e6fc47515e8614d2e4790ae10/orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7", size = 140318, upload-time = "2025-10-24T15:49:00.834Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/5a4801803ab2e2e2d703bce1a56540d9f99a9143fbec7bf63d225044fef8/orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549", size = 406330, upload-time = "2025-10-24T15:49:02.327Z" }, + { url = "https://files.pythonhosted.org/packages/80/55/a8f682f64833e3a649f620eafefee175cbfeb9854fc5b710b90c3bca45df/orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905", size = 149580, upload-time = "2025-10-24T15:49:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e4/c132fa0c67afbb3eb88274fa98df9ac1f631a675e7877037c611805a4413/orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907", size = 139846, upload-time = "2025-10-24T15:49:04.761Z" }, + { url = "https://files.pythonhosted.org/packages/54/06/dc3491489efd651fef99c5908e13951abd1aead1257c67f16135f95ce209/orjson-3.11.4-cp311-cp311-win32.whl", hash = "sha256:87255b88756eab4a68ec61837ca754e5d10fa8bc47dc57f75cedfeaec358d54c", size = 135781, upload-time = "2025-10-24T15:49:05.969Z" }, + { url = "https://files.pythonhosted.org/packages/79/b7/5e5e8d77bd4ea02a6ac54c42c818afb01dd31961be8a574eb79f1d2cfb1e/orjson-3.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:e2d5d5d798aba9a0e1fede8d853fa899ce2cb930ec0857365f700dffc2c7af6a", size = 131391, upload-time = "2025-10-24T15:49:07.355Z" }, + { url = "https://files.pythonhosted.org/packages/0f/dc/9484127cc1aa213be398ed735f5f270eedcb0c0977303a6f6ddc46b60204/orjson-3.11.4-cp311-cp311-win_arm64.whl", hash = "sha256:6bb6bb41b14c95d4f2702bce9975fda4516f1db48e500102fc4d8119032ff045", size = 126252, upload-time = "2025-10-24T15:49:08.869Z" }, + { url = "https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571, upload-time = "2025-10-24T15:49:10.008Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891, upload-time = "2025-10-24T15:49:11.297Z" }, + { url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137, upload-time = "2025-10-24T15:49:12.544Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152, upload-time = "2025-10-24T15:49:13.754Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834, upload-time = "2025-10-24T15:49:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519, upload-time = "2025-10-24T15:49:16.557Z" }, + { url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749, upload-time = "2025-10-24T15:49:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325, upload-time = "2025-10-24T15:49:19.347Z" }, + { url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204, upload-time = "2025-10-24T15:49:20.545Z" }, + { url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242, upload-time = "2025-10-24T15:49:21.884Z" }, + { url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013, upload-time = "2025-10-24T15:49:23.185Z" }, + { url = "https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951, upload-time = "2025-10-24T15:49:24.428Z" }, + { 
url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049, upload-time = "2025-10-24T15:49:25.973Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461, upload-time = "2025-10-24T15:49:27.259Z" }, + { url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167, upload-time = "2025-10-24T15:49:28.511Z" }, + { url = "https://files.pythonhosted.org/packages/23/15/c52aa7112006b0f3d6180386c3a46ae057f932ab3425bc6f6ac50431cca1/orjson-3.11.4-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2d6737d0e616a6e053c8b4acc9eccea6b6cce078533666f32d140e4f85002534", size = 243525, upload-time = "2025-10-24T15:49:29.737Z" }, + { url = "https://files.pythonhosted.org/packages/ec/38/05340734c33b933fd114f161f25a04e651b0c7c33ab95e9416ade5cb44b8/orjson-3.11.4-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:afb14052690aa328cc118a8e09f07c651d301a72e44920b887c519b313d892ff", size = 128871, upload-time = "2025-10-24T15:49:31.109Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/ae8d34899ff0c012039b5a7cb96a389b2476e917733294e498586b45472d/orjson-3.11.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38aa9e65c591febb1b0aed8da4d469eba239d434c218562df179885c94e1a3ad", size = 130055, upload-time = "2025-10-24T15:49:33.382Z" }, + { url = "https://files.pythonhosted.org/packages/33/aa/6346dd5073730451bee3681d901e3c337e7ec17342fb79659ec9794fc023/orjson-3.11.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f2cf4dfaf9163b0728d061bebc1e08631875c51cd30bf47cb9e3293bfbd7dcd5", size = 129061, upload-time = "2025-10-24T15:49:34.935Z" }, + { url = "https://files.pythonhosted.org/packages/39/e4/8eea51598f66a6c853c380979912d17ec510e8e66b280d968602e680b942/orjson-3.11.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89216ff3dfdde0e4070932e126320a1752c9d9a758d6a32ec54b3b9334991a6a", size = 136541, upload-time = "2025-10-24T15:49:36.923Z" }, + { url = "https://files.pythonhosted.org/packages/9a/47/cb8c654fa9adcc60e99580e17c32b9e633290e6239a99efa6b885aba9dbc/orjson-3.11.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9daa26ca8e97fae0ce8aa5d80606ef8f7914e9b129b6b5df9104266f764ce436", size = 137535, upload-time = "2025-10-24T15:49:38.307Z" }, + { url = "https://files.pythonhosted.org/packages/43/92/04b8cc5c2b729f3437ee013ce14a60ab3d3001465d95c184758f19362f23/orjson-3.11.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c8b2769dc31883c44a9cd126560327767f848eb95f99c36c9932f51090bfce9", size = 136703, upload-time = "2025-10-24T15:49:40.795Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fd/d0733fcb9086b8be4ebcfcda2d0312865d17d0d9884378b7cffb29d0763f/orjson-3.11.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1469d254b9884f984026bd9b0fa5bbab477a4bfe558bba6848086f6d43eb5e73", size = 136293, upload-time = 
"2025-10-24T15:49:42.347Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d7/3c5514e806837c210492d72ae30ccf050ce3f940f45bf085bab272699ef4/orjson-3.11.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:68e44722541983614e37117209a194e8c3ad07838ccb3127d96863c95ec7f1e0", size = 140131, upload-time = "2025-10-24T15:49:43.638Z" }, + { url = "https://files.pythonhosted.org/packages/9c/dd/ba9d32a53207babf65bd510ac4d0faaa818bd0df9a9c6f472fe7c254f2e3/orjson-3.11.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:8e7805fda9672c12be2f22ae124dcd7b03928d6c197544fe12174b86553f3196", size = 406164, upload-time = "2025-10-24T15:49:45.498Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f9/f68ad68f4af7c7bde57cd514eaa2c785e500477a8bc8f834838eb696a685/orjson-3.11.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04b69c14615fb4434ab867bf6f38b2d649f6f300af30a6705397e895f7aec67a", size = 149859, upload-time = "2025-10-24T15:49:46.981Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d2/7f847761d0c26818395b3d6b21fb6bc2305d94612a35b0a30eae65a22728/orjson-3.11.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:639c3735b8ae7f970066930e58cf0ed39a852d417c24acd4a25fc0b3da3c39a6", size = 139926, upload-time = "2025-10-24T15:49:48.321Z" }, + { url = "https://files.pythonhosted.org/packages/9f/37/acd14b12dc62db9a0e1d12386271b8661faae270b22492580d5258808975/orjson-3.11.4-cp313-cp313-win32.whl", hash = "sha256:6c13879c0d2964335491463302a6ca5ad98105fc5db3565499dcb80b1b4bd839", size = 136007, upload-time = "2025-10-24T15:49:49.938Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a9/967be009ddf0a1fffd7a67de9c36656b28c763659ef91352acc02cbe364c/orjson-3.11.4-cp313-cp313-win_amd64.whl", hash = "sha256:09bf242a4af98732db9f9a1ec57ca2604848e16f132e3f72edfd3c5c96de009a", size = 131314, upload-time = "2025-10-24T15:49:51.248Z" }, + { url = "https://files.pythonhosted.org/packages/cb/db/399abd6950fbd94ce125cb8cd1a968def95174792e127b0642781e040ed4/orjson-3.11.4-cp313-cp313-win_arm64.whl", hash = "sha256:a85f0adf63319d6c1ba06fb0dbf997fced64a01179cf17939a6caca662bf92de", size = 126152, upload-time = "2025-10-24T15:49:52.922Z" }, + { url = "https://files.pythonhosted.org/packages/25/e3/54ff63c093cc1697e758e4fceb53164dd2661a7d1bcd522260ba09f54533/orjson-3.11.4-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:42d43a1f552be1a112af0b21c10a5f553983c2a0938d2bbb8ecd8bc9fb572803", size = 243501, upload-time = "2025-10-24T15:49:54.288Z" }, + { url = "https://files.pythonhosted.org/packages/ac/7d/e2d1076ed2e8e0ae9badca65bf7ef22710f93887b29eaa37f09850604e09/orjson-3.11.4-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:26a20f3fbc6c7ff2cb8e89c4c5897762c9d88cf37330c6a117312365d6781d54", size = 128862, upload-time = "2025-10-24T15:49:55.961Z" }, + { url = "https://files.pythonhosted.org/packages/9f/37/ca2eb40b90621faddfa9517dfe96e25f5ae4d8057a7c0cdd613c17e07b2c/orjson-3.11.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e3f20be9048941c7ffa8fc523ccbd17f82e24df1549d1d1fe9317712d19938e", size = 130047, upload-time = "2025-10-24T15:49:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/c7/62/1021ed35a1f2bad9040f05fa4cc4f9893410df0ba3eaa323ccf899b1c90a/orjson-3.11.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aac364c758dc87a52e68e349924d7e4ded348dedff553889e4d9f22f74785316", size = 129073, upload-time = "2025-10-24T15:49:58.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/3f/f84d966ec2a6fd5f73b1a707e7cd876813422ae4bf9f0145c55c9c6a0f57/orjson-3.11.4-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5c54a6d76e3d741dcc3f2707f8eeb9ba2a791d3adbf18f900219b62942803b1", size = 136597, upload-time = "2025-10-24T15:50:00.12Z" }, + { url = "https://files.pythonhosted.org/packages/32/78/4fa0aeca65ee82bbabb49e055bd03fa4edea33f7c080c5c7b9601661ef72/orjson-3.11.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f28485bdca8617b79d44627f5fb04336897041dfd9fa66d383a49d09d86798bc", size = 137515, upload-time = "2025-10-24T15:50:01.57Z" }, + { url = "https://files.pythonhosted.org/packages/c1/9d/0c102e26e7fde40c4c98470796d050a2ec1953897e2c8ab0cb95b0759fa2/orjson-3.11.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2a484cad3585e4ba61985a6062a4c2ed5c7925db6d39f1fa267c9d166487f", size = 136703, upload-time = "2025-10-24T15:50:02.944Z" }, + { url = "https://files.pythonhosted.org/packages/df/ac/2de7188705b4cdfaf0b6c97d2f7849c17d2003232f6e70df98602173f788/orjson-3.11.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e34dbd508cb91c54f9c9788923daca129fe5b55c5b4eebe713bf5ed3791280cf", size = 136311, upload-time = "2025-10-24T15:50:04.441Z" }, + { url = "https://files.pythonhosted.org/packages/e0/52/847fcd1a98407154e944feeb12e3b4d487a0e264c40191fb44d1269cbaa1/orjson-3.11.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b13c478fa413d4b4ee606ec8e11c3b2e52683a640b006bb586b3041c2ca5f606", size = 140127, upload-time = "2025-10-24T15:50:07.398Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ae/21d208f58bdb847dd4d0d9407e2929862561841baa22bdab7aea10ca088e/orjson-3.11.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:724ca721ecc8a831b319dcd72cfa370cc380db0bf94537f08f7edd0a7d4e1780", size = 406201, upload-time = "2025-10-24T15:50:08.796Z" }, + { url = "https://files.pythonhosted.org/packages/8d/55/0789d6de386c8366059db098a628e2ad8798069e94409b0d8935934cbcb9/orjson-3.11.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:977c393f2e44845ce1b540e19a786e9643221b3323dae190668a98672d43fb23", size = 149872, upload-time = "2025-10-24T15:50:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/cc/1d/7ff81ea23310e086c17b41d78a72270d9de04481e6113dbe2ac19118f7fb/orjson-3.11.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1e539e382cf46edec157ad66b0b0872a90d829a6b71f17cb633d6c160a223155", size = 139931, upload-time = "2025-10-24T15:50:11.623Z" }, + { url = "https://files.pythonhosted.org/packages/77/92/25b886252c50ed64be68c937b562b2f2333b45afe72d53d719e46a565a50/orjson-3.11.4-cp314-cp314-win32.whl", hash = "sha256:d63076d625babab9db5e7836118bdfa086e60f37d8a174194ae720161eb12394", size = 136065, upload-time = "2025-10-24T15:50:13.025Z" }, + { url = "https://files.pythonhosted.org/packages/63/b8/718eecf0bb7e9d64e4956afaafd23db9f04c776d445f59fe94f54bdae8f0/orjson-3.11.4-cp314-cp314-win_amd64.whl", hash = "sha256:0a54d6635fa3aaa438ae32e8570b9f0de36f3f6562c308d2a2a452e8b0592db1", size = 131310, upload-time = "2025-10-24T15:50:14.46Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bf/def5e25d4d8bfce296a9a7c8248109bf58622c21618b590678f945a2c59c/orjson-3.11.4-cp314-cp314-win_arm64.whl", hash = "sha256:78b999999039db3cf58f6d230f524f04f75f129ba3d1ca2ed121f8657e575d3d", size = 126151, upload-time = "2025-10-24T15:50:15.878Z" }, +] + +[[package]] +name = "ormsgpack" +version = "1.11.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/f8/224c342c0e03e131aaa1a1f19aa2244e167001783a433f4eed10eedd834b/ormsgpack-1.11.0.tar.gz", hash = "sha256:7c9988e78fedba3292541eb3bb274fa63044ef4da2ddb47259ea70c05dee4206", size = 49357, upload-time = "2025-10-08T17:29:15.621Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/3d/6996193cb2babc47fc92456223bef7d141065357ad4204eccf313f47a7b3/ormsgpack-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:03d4e658dd6e1882a552ce1d13cc7b49157414e7d56a4091fbe7823225b08cba", size = 367965, upload-time = "2025-10-08T17:28:06.736Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/c83b805dd9caebb046f4ceeed3706d0902ed2dbbcf08b8464e89f2c52e05/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67eb913c2b703f0ed39607fc56e50724dd41f92ce080a586b4d6149eb3fe4", size = 195209, upload-time = "2025-10-08T17:28:08.395Z" }, + { url = "https://files.pythonhosted.org/packages/3a/17/427d9c4f77b120f0af01d7a71d8144771c9388c2a81f712048320e31353b/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e54175b92411f73a238e5653a998627f6660de3def37d9dd7213e0fd264ca56", size = 205868, upload-time = "2025-10-08T17:28:09.688Z" }, + { url = "https://files.pythonhosted.org/packages/82/32/a9ce218478bdbf3fee954159900e24b314ab3064f7b6a217ccb1e3464324/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca2b197f4556e1823d1319869d4c5dc278be335286d2308b0ed88b59a5afcc25", size = 207391, upload-time = "2025-10-08T17:28:11.031Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d3/4413fe7454711596fdf08adabdfa686580e4656702015108e4975f00a022/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc62388262f58c792fe1e450e1d9dbcc174ed2fb0b43db1675dd7c5ff2319d6a", size = 377078, upload-time = "2025-10-08T17:28:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ad/13fae555a45e35ca1ca929a27c9ee0a3ecada931b9d44454658c543f9b9c/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c48bc10af74adfbc9113f3fb160dc07c61ad9239ef264c17e449eba3de343dc2", size = 470776, upload-time = "2025-10-08T17:28:13.484Z" }, + { url = "https://files.pythonhosted.org/packages/36/60/51178b093ffc4e2ef3381013a67223e7d56224434fba80047249f4a84b26/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a608d3a1d4fa4acdc5082168a54513cff91f47764cef435e81a483452f5f7647", size = 380862, upload-time = "2025-10-08T17:28:14.747Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e3/1cb6c161335e2ae7d711ecfb007a31a3936603626e347c13e5e53b7c7cf8/ormsgpack-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:97217b4f7f599ba45916b9c4c4b1d5656e8e2a4d91e2e191d72a7569d3c30923", size = 112058, upload-time = "2025-10-08T17:28:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/90164d00e8e94b48eff8a17bc2f4be6b71ae356a00904bc69d5e8afe80fb/ormsgpack-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c7be823f47d8e36648d4bc90634b93f02b7d7cc7480081195f34767e86f181fb", size = 367964, upload-time = "2025-10-08T17:28:16.778Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c2/fb6331e880a3446c1341e72c77bd5a46da3e92a8e2edf7ea84a4c6c14fff/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:68accf15d1b013812755c0eb7a30e1fc2f81eb603a1a143bf0cda1b301cfa797", size = 195209, upload-time = "2025-10-08T17:28:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/50/4943fb5df8cc02da6b7b1ee2c2a7fb13aebc9f963d69280b1bb02b1fb178/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:805d06fb277d9a4e503c0c707545b49cde66cbb2f84e5cf7c58d81dfc20d8658", size = 205869, upload-time = "2025-10-08T17:28:19.01Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/e7e06835bfea9adeef43915143ce818098aecab0cbd3df584815adf3e399/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1e57cdf003e77acc43643bda151dc01f97147a64b11cdee1380bb9698a7601c", size = 207391, upload-time = "2025-10-08T17:28:20.352Z" }, + { url = "https://files.pythonhosted.org/packages/33/f0/f28a19e938a14ec223396e94f4782fbcc023f8c91f2ab6881839d3550f32/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:37fc05bdaabd994097c62e2f3e08f66b03f856a640ede6dc5ea340bd15b77f4d", size = 377081, upload-time = "2025-10-08T17:28:21.926Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e3/73d1d7287637401b0b6637e30ba9121e1aa1d9f5ea185ed9834ca15d512c/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a6e9db6c73eb46b2e4d97bdffd1368a66f54e6806b563a997b19c004ef165e1d", size = 470779, upload-time = "2025-10-08T17:28:22.993Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/7ba7f9721e766dd0dfe4cedf444439447212abffe2d2f4538edeeec8ccbd/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9c44eae5ac0196ffc8b5ed497c75511056508f2303fa4d36b208eb820cf209e", size = 380865, upload-time = "2025-10-08T17:28:24.012Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7d/bb92a0782bbe0626c072c0320001410cf3f6743ede7dc18f034b1a18edef/ormsgpack-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:11d0dfaf40ae7c6de4f7dbd1e4892e2e6a55d911ab1774357c481158d17371e4", size = 112058, upload-time = "2025-10-08T17:28:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/28/1a/f07c6f74142815d67e1d9d98c5b2960007100408ade8242edac96d5d1c73/ormsgpack-1.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:0c63a3f7199a3099c90398a1bdf0cb577b06651a442dc5efe67f2882665e5b02", size = 105894, upload-time = "2025-10-08T17:28:25.93Z" }, + { url = "https://files.pythonhosted.org/packages/1e/16/2805ebfb3d2cbb6c661b5fae053960fc90a2611d0d93e2207e753e836117/ormsgpack-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3434d0c8d67de27d9010222de07fb6810fb9af3bb7372354ffa19257ac0eb83b", size = 368474, upload-time = "2025-10-08T17:28:27.532Z" }, + { url = "https://files.pythonhosted.org/packages/6f/39/6afae47822dca0ce4465d894c0bbb860a850ce29c157882dbdf77a5dd26e/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2da5bd097e8dbfa4eb0d4ccfe79acd6f538dee4493579e2debfe4fc8f4ca89b", size = 195321, upload-time = "2025-10-08T17:28:28.573Z" }, + { url = "https://files.pythonhosted.org/packages/f6/54/11eda6b59f696d2f16de469bfbe539c9f469c4b9eef5a513996b5879c6e9/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fdbaa0a5a8606a486960b60c24f2d5235d30ac7a8b98eeaea9854bffef14dc3d", size = 206036, upload-time = "2025-10-08T17:28:29.785Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/86/890430f704f84c4699ddad61c595d171ea2fd77a51fbc106f83981e83939/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3682f24f800c1837017ee90ce321086b2cbaef88db7d4cdbbda1582aa6508159", size = 207615, upload-time = "2025-10-08T17:28:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b9/77383e16c991c0ecb772205b966fc68d9c519e0b5f9c3913283cbed30ffe/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fcca21202bb05ccbf3e0e92f560ee59b9331182e4c09c965a28155efbb134993", size = 377195, upload-time = "2025-10-08T17:28:32.436Z" }, + { url = "https://files.pythonhosted.org/packages/20/e2/15f9f045d4947f3c8a5e0535259fddf027b17b1215367488b3565c573b9d/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c30e5c4655ba46152d722ec7468e8302195e6db362ec1ae2c206bc64f6030e43", size = 470960, upload-time = "2025-10-08T17:28:33.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/403ce188c4c495bc99dff921a0ad3d9d352dd6d3c4b629f3638b7f0cf79b/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7138a341f9e2c08c59368f03d3be25e8b87b3baaf10d30fb1f6f6b52f3d47944", size = 381174, upload-time = "2025-10-08T17:28:34.781Z" }, + { url = "https://files.pythonhosted.org/packages/14/a8/94c94bc48c68da4374870a851eea03fc5a45eb041182ad4c5ed9acfc05a4/ormsgpack-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4bd8589b78a11026d47f4edf13c1ceab9088bb12451f34396afe6497db28a27", size = 112314, upload-time = "2025-10-08T17:28:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/19/d0/aa4cf04f04e4cc180ce7a8d8ddb5a7f3af883329cbc59645d94d3ba157a5/ormsgpack-1.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:e5e746a1223e70f111d4001dab9585ac8639eee8979ca0c8db37f646bf2961da", size = 106072, upload-time = "2025-10-08T17:28:37.518Z" }, + { url = "https://files.pythonhosted.org/packages/8b/35/e34722edb701d053cf2240f55974f17b7dbfd11fdef72bd2f1835bcebf26/ormsgpack-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e7b36ab7b45cb95217ae1f05f1318b14a3e5ef73cb00804c0f06233f81a14e8", size = 368502, upload-time = "2025-10-08T17:28:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6a/c2fc369a79d6aba2aa28c8763856c95337ac7fcc0b2742185cd19397212a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43402d67e03a9a35cc147c8c03f0c377cad016624479e1ee5b879b8425551484", size = 195344, upload-time = "2025-10-08T17:28:39.554Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6a/0f8e24b7489885534c1a93bdba7c7c434b9b8638713a68098867db9f254c/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:64fd992f932764d6306b70ddc755c1bc3405c4c6a69f77a36acf7af1c8f5ada4", size = 206045, upload-time = "2025-10-08T17:28:40.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/71/8b460ba264f3c6f82ef5b1920335720094e2bd943057964ce5287d6df83a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0362fb7fe4a29c046c8ea799303079a09372653a1ce5a5a588f3bbb8088368d0", size = 207641, upload-time = "2025-10-08T17:28:41.736Z" }, + { url = "https://files.pythonhosted.org/packages/50/cf/f369446abaf65972424ed2651f2df2b7b5c3b735c93fc7fa6cfb81e34419/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:de2f7a65a9d178ed57be49eba3d0fc9b833c32beaa19dbd4ba56014d3c20b152", size = 377211, upload-time = 
"2025-10-08T17:28:43.12Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3f/948bb0047ce0f37c2efc3b9bb2bcfdccc61c63e0b9ce8088d4903ba39dcf/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f38cfae95461466055af966fc922d06db4e1654966385cda2828653096db34da", size = 470973, upload-time = "2025-10-08T17:28:44.465Z" }, + { url = "https://files.pythonhosted.org/packages/31/a4/92a8114d1d017c14aaa403445060f345df9130ca532d538094f38e535988/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c88396189d238f183cea7831b07a305ab5c90d6d29b53288ae11200bd956357b", size = 381161, upload-time = "2025-10-08T17:28:46.063Z" }, + { url = "https://files.pythonhosted.org/packages/d0/64/5b76447da654798bfcfdfd64ea29447ff2b7f33fe19d0e911a83ad5107fc/ormsgpack-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:5403d1a945dd7c81044cebeca3f00a28a0f4248b33242a5d2d82111628043725", size = 112321, upload-time = "2025-10-08T17:28:47.393Z" }, + { url = "https://files.pythonhosted.org/packages/46/5e/89900d06db9ab81e7ec1fd56a07c62dfbdcda398c435718f4252e1dc52a0/ormsgpack-1.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c57357b8d43b49722b876edf317bdad9e6d52071b523fdd7394c30cd1c67d5a0", size = 106084, upload-time = "2025-10-08T17:28:48.305Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0b/c659e8657085c8c13f6a0224789f422620cef506e26573b5434defe68483/ormsgpack-1.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d390907d90fd0c908211592c485054d7a80990697ef4dff4e436ac18e1aab98a", size = 368497, upload-time = "2025-10-08T17:28:49.297Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0e/451e5848c7ed56bd287e8a2b5cb5926e54466f60936e05aec6cb299f9143/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6153c2e92e789509098e04c9aa116b16673bd88ec78fbe0031deeb34ab642d10", size = 195385, upload-time = "2025-10-08T17:28:50.314Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/90f78cbbe494959f2439c2ec571f08cd3464c05a6a380b0d621c622122a9/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2b2c2a065a94d742212b2018e1fecd8f8d72f3c50b53a97d1f407418093446d", size = 206114, upload-time = "2025-10-08T17:28:51.336Z" }, + { url = "https://files.pythonhosted.org/packages/fb/db/34163f4c0923bea32dafe42cd878dcc66795a3e85669bc4b01c1e2b92a7b/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110e65b5340f3d7ef8b0009deae3c6b169437e6b43ad5a57fd1748085d29d2ac", size = 207679, upload-time = "2025-10-08T17:28:53.627Z" }, + { url = "https://files.pythonhosted.org/packages/b6/14/04ee741249b16f380a9b4a0cc19d4134d0b7c74bab27a2117da09e525eb9/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c27e186fca96ab34662723e65b420919910acbbc50fc8e1a44e08f26268cb0e0", size = 377237, upload-time = "2025-10-08T17:28:56.12Z" }, + { url = "https://files.pythonhosted.org/packages/89/ff/53e588a6aaa833237471caec679582c2950f0e7e1a8ba28c1511b465c1f4/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d56b1f877c13d499052d37a3db2378a97d5e1588d264f5040b3412aee23d742c", size = 471021, upload-time = "2025-10-08T17:28:57.299Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f9/f20a6d9ef2be04da3aad05e8f5699957e9a30c6d5c043a10a296afa7e890/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c88e28cd567c0a3269f624b4ade28142d5e502c8e826115093c572007af5be0a", size = 381205, upload-time = 
"2025-10-08T17:28:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/64/96c07d084b479ac8b7821a77ffc8d3f29d8b5c95ebfdf8db1c03dff02762/ormsgpack-1.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:8811160573dc0a65f62f7e0792c4ca6b7108dfa50771edb93f9b84e2d45a08ae", size = 112374, upload-time = "2025-10-08T17:29:00Z" }, + { url = "https://files.pythonhosted.org/packages/88/a5/5dcc18b818d50213a3cadfe336bb6163a102677d9ce87f3d2f1a1bee0f8c/ormsgpack-1.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:23e30a8d3c17484cf74e75e6134322255bd08bc2b5b295cc9c442f4bae5f3c2d", size = 106056, upload-time = "2025-10-08T17:29:01.29Z" }, + { url = "https://files.pythonhosted.org/packages/19/2b/776d1b411d2be50f77a6e6e94a25825cca55dcacfe7415fd691a144db71b/ormsgpack-1.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2905816502adfaf8386a01dd85f936cd378d243f4f5ee2ff46f67f6298dc90d5", size = 368661, upload-time = "2025-10-08T17:29:02.382Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0c/81a19e6115b15764db3d241788f9fac093122878aaabf872cc545b0c4650/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c04402fb9a0a9b9f18fbafd6d5f8398ee99b3ec619fb63952d3a954bc9d47daa", size = 195539, upload-time = "2025-10-08T17:29:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/97/86/e5b50247a61caec5718122feb2719ea9d451d30ac0516c288c1dbc6408e8/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a025ec07ac52056ecfd9e57b5cbc6fff163f62cb9805012b56cda599157f8ef2", size = 207718, upload-time = "2025-10-08T17:29:04.545Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = 
"2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, + { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = 
"sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = 
"sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 
2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = "https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, 
upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "pytest-socket" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/ff/90c7e1e746baf3d62ce864c479fd53410b534818b9437413903596f81580/pytest_socket-0.7.0.tar.gz", hash = "sha256:71ab048cbbcb085c15a4423b73b619a8b35d6a307f46f78ea46be51b1b7e11b3", size = 12389, upload-time = "2024-01-28T20:17:23.177Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/58/5d14cb5cb59409e491ebe816c47bf81423cd03098ea92281336320ae5681/pytest_socket-0.7.0-py3-none-any.whl", hash = "sha256:7e0f4642177d55d317bbd58fc68c6bd9048d6eadb2d46a89307fa9221336ce45", size = 6754, upload-time = "2024-01-28T20:17:22.105Z" }, +] + +[[package]] +name = "pytest-watcher" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "watchdog" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/72/a2a1e81f1b272ddd9a1848af4959c87c39aa95c0bbfb3007cacb86c47fa9/pytest_watcher-0.4.3.tar.gz", hash = "sha256:0cb0e4661648c8c0ff2b2d25efa5a8e421784b9e4c60fcecbf9b7c30b2d731b3", size = 10386, upload-time = "2024-08-28T17:37:46.662Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/3a/c44a76c6bb5e9e896d9707fb1c704a31a0136950dec9514373ced0684d56/pytest_watcher-0.4.3-py3-none-any.whl", hash = "sha256:d59b1e1396f33a65ea4949b713d6884637755d641646960056a90b267c3460f9", size = 11852, upload-time = "2024-08-28T17:37:45.731Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, 
+ { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + 
{ url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, 
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "regex" +version = "2025.10.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/c8/1d2160d36b11fbe0a61acb7c3c81ab032d9ec8ad888ac9e0a61b85ab99dd/regex-2025.10.23.tar.gz", hash = "sha256:8cbaf8ceb88f96ae2356d01b9adf5e6306fa42fa6f7eab6b97794e37c959ac26", size = 401266, upload-time = "2025-10-21T15:58:20.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/11/849d5d23633a77047465eaae4cc0cbf24ded7aa496c02e8b9710e28b1687/regex-2025.10.23-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:17bbcde374bef1c5fad9b131f0e28a6a24856dd90368d8c0201e2b5a69533daa", size = 487957, upload-time = "2025-10-21T15:54:26.151Z" }, + { url = "https://files.pythonhosted.org/packages/87/12/5985386e7e3200a0d6a6417026d2c758d783a932428a5efc0a42ca1ddf74/regex-2025.10.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4e10434279cc8567f99ca6e018e9025d14f2fded2a603380b6be2090f476426", size = 290419, upload-time = "2025-10-21T15:54:28.804Z" }, + { url = "https://files.pythonhosted.org/packages/67/cf/a8615923f962f8fdc41a3a6093a48726955e8b1993f4614b26a41d249f9b/regex-2025.10.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c9bb421cbe7012c744a5a56cf4d6c80829c72edb1a2991677299c988d6339c8", size = 288285, upload-time = "2025-10-21T15:54:30.47Z" }, + { url = "https://files.pythonhosted.org/packages/4e/3d/6a3a1e12c86354cd0b3cbf8c3dd6acbe853609ee3b39d47ecd3ce95caf84/regex-2025.10.23-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:275cd1c2ed8c4a78ebfa489618d7aee762e8b4732da73573c3e38236ec5f65de", size = 781458, upload-time = "2025-10-21T15:54:31.978Z" }, + { url = "https://files.pythonhosted.org/packages/46/47/76a8da004489f2700361754859e373b87a53d043de8c47f4d1583fd39d78/regex-2025.10.23-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b426ae7952f3dc1e73a86056d520bd4e5f021397484a6835902fc5648bcacce", size = 850605, upload-time = "2025-10-21T15:54:33.753Z" }, + { url = "https://files.pythonhosted.org/packages/67/05/fa886461f97d45a6f4b209699cb994dc6d6212d6e219d29444dac5005775/regex-2025.10.23-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c5cdaf5b6d37c7da1967dbe729d819461aab6a98a072feef65bbcff0a6e60649", size = 898563, upload-time = "2025-10-21T15:54:35.431Z" }, + { url = "https://files.pythonhosted.org/packages/2d/db/3ddd8d01455f23cabad7499f4199de0df92f5e96d39633203ff9d0b592dc/regex-2025.10.23-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bfeff0b08f296ab28b4332a7e03ca31c437ee78b541ebc874bbf540e5932f8d", size = 791535, upload-time = "2025-10-21T15:54:37.269Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/ae/0fa5cbf41ca92b6ec3370222fcb6c68b240d68ab10e803d086c03a19fd9e/regex-2025.10.23-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f97236a67307b775f30a74ef722b64b38b7ab7ba3bb4a2508518a5de545459c", size = 782461, upload-time = "2025-10-21T15:54:39.187Z" }, + { url = "https://files.pythonhosted.org/packages/d4/23/70af22a016df11af4def27870eb175c2c7235b72d411ecf75a4b4a422cb6/regex-2025.10.23-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:be19e7de499940cd72475fb8e46ab2ecb1cf5906bebdd18a89f9329afb1df82f", size = 774583, upload-time = "2025-10-21T15:54:41.018Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ee/a54a6851f6905f33d3c4ed64e8737b1d85ed01b5724712530ddc0f9abdb1/regex-2025.10.23-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:883df76ee42d9ecb82b37ff8d01caea5895b3f49630a64d21111078bbf8ef64c", size = 845649, upload-time = "2025-10-21T15:54:42.615Z" }, + { url = "https://files.pythonhosted.org/packages/80/7d/c3ec1cae14e01fab00e38c41ed35f47a853359e95e9c023e9a4381bb122c/regex-2025.10.23-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2e9117d1d35fc2addae6281019ecc70dc21c30014b0004f657558b91c6a8f1a7", size = 836037, upload-time = "2025-10-21T15:54:44.63Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/45771140dd43c4d67c87b54d3728078ed6a96599d9fc7ba6825086236782/regex-2025.10.23-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0ff1307f531a5d8cf5c20ea517254551ff0a8dc722193aab66c656c5a900ea68", size = 779705, upload-time = "2025-10-21T15:54:46.08Z" }, + { url = "https://files.pythonhosted.org/packages/b8/95/074e2581760eafce7c816a352b7d3a322536e5b68c346d1a8bacd895545c/regex-2025.10.23-cp310-cp310-win32.whl", hash = "sha256:7888475787cbfee4a7cd32998eeffe9a28129fa44ae0f691b96cb3939183ef41", size = 265663, upload-time = "2025-10-21T15:54:47.854Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c7/a25f56a718847e34d3f1608c72eadeb67653bff1a0411da023dd8f4c647b/regex-2025.10.23-cp310-cp310-win_amd64.whl", hash = "sha256:ec41a905908496ce4906dab20fb103c814558db1d69afc12c2f384549c17936a", size = 277587, upload-time = "2025-10-21T15:54:49.571Z" }, + { url = "https://files.pythonhosted.org/packages/d3/e5/63eb17c6b5deaefd93c2bbb1feae7c0a8d2157da25883a6ca2569cf7a663/regex-2025.10.23-cp310-cp310-win_arm64.whl", hash = "sha256:b2b7f19a764d5e966d5a62bf2c28a8b4093cc864c6734510bdb4aeb840aec5e6", size = 269979, upload-time = "2025-10-21T15:54:51.375Z" }, + { url = "https://files.pythonhosted.org/packages/82/e5/74b7cd5cd76b4171f9793042045bb1726f7856dd56e582fc3e058a7a8a5e/regex-2025.10.23-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6c531155bf9179345e85032052a1e5fe1a696a6abf9cea54b97e8baefff970fd", size = 487960, upload-time = "2025-10-21T15:54:53.253Z" }, + { url = "https://files.pythonhosted.org/packages/b9/08/854fa4b3b20471d1df1c71e831b6a1aa480281e37791e52a2df9641ec5c6/regex-2025.10.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:912e9df4e89d383681268d38ad8f5780d7cccd94ba0e9aa09ca7ab7ab4f8e7eb", size = 290425, upload-time = "2025-10-21T15:54:55.21Z" }, + { url = "https://files.pythonhosted.org/packages/ab/d3/6272b1dd3ca1271661e168762b234ad3e00dbdf4ef0c7b9b72d2d159efa7/regex-2025.10.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f375c61bfc3138b13e762fe0ae76e3bdca92497816936534a0177201666f44f", size = 288278, upload-time = "2025-10-21T15:54:56.862Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/8f/c7b365dd9d9bc0a36e018cb96f2ffb60d2ba8deb589a712b437f67de2920/regex-2025.10.23-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e248cc9446081119128ed002a3801f8031e0c219b5d3c64d3cc627da29ac0a33", size = 793289, upload-time = "2025-10-21T15:54:58.352Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fb/b8fbe9aa16cf0c21f45ec5a6c74b4cecbf1a1c0deb7089d4a6f83a9c1caa/regex-2025.10.23-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b52bf9282fdf401e4f4e721f0f61fc4b159b1307244517789702407dd74e38ca", size = 860321, upload-time = "2025-10-21T15:54:59.813Z" }, + { url = "https://files.pythonhosted.org/packages/b0/81/bf41405c772324926a9bd8a640dedaa42da0e929241834dfce0733070437/regex-2025.10.23-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c084889ab2c59765a0d5ac602fd1c3c244f9b3fcc9a65fdc7ba6b74c5287490", size = 907011, upload-time = "2025-10-21T15:55:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fb/5ad6a8b92d3f88f3797b51bb4ef47499acc2d0b53d2fbe4487a892f37a73/regex-2025.10.23-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d80e8eb79009bdb0936658c44ca06e2fbbca67792013e3818eea3f5f228971c2", size = 800312, upload-time = "2025-10-21T15:55:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/42/48/b4efba0168a2b57f944205d823f8e8a3a1ae6211a34508f014ec2c712f4f/regex-2025.10.23-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6f259118ba87b814a8ec475380aee5f5ae97a75852a3507cf31d055b01b5b40", size = 782839, upload-time = "2025-10-21T15:55:05.641Z" }, + { url = "https://files.pythonhosted.org/packages/13/2a/c9efb4c6c535b0559c1fa8e431e0574d229707c9ca718600366fcfef6801/regex-2025.10.23-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9b8c72a242683dcc72d37595c4f1278dfd7642b769e46700a8df11eab19dfd82", size = 854270, upload-time = "2025-10-21T15:55:07.27Z" }, + { url = "https://files.pythonhosted.org/packages/34/2d/68eecc1bdaee020e8ba549502291c9450d90d8590d0552247c9b543ebf7b/regex-2025.10.23-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d7b7a0a3df9952f9965342159e0c1f05384c0f056a47ce8b61034f8cecbe83", size = 845771, upload-time = "2025-10-21T15:55:09.477Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cd/a1ae499cf9b87afb47a67316bbf1037a7c681ffe447c510ed98c0aa2c01c/regex-2025.10.23-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:413bfea20a484c524858125e92b9ce6ffdd0a4b97d4ff96b5859aa119b0f1bdd", size = 788778, upload-time = "2025-10-21T15:55:11.396Z" }, + { url = "https://files.pythonhosted.org/packages/38/f9/70765e63f5ea7d43b2b6cd4ee9d3323f16267e530fb2a420d92d991cf0fc/regex-2025.10.23-cp311-cp311-win32.whl", hash = "sha256:f76deef1f1019a17dad98f408b8f7afc4bd007cbe835ae77b737e8c7f19ae575", size = 265666, upload-time = "2025-10-21T15:55:13.306Z" }, + { url = "https://files.pythonhosted.org/packages/9c/1a/18e9476ee1b63aaec3844d8e1cb21842dc19272c7e86d879bfc0dcc60db3/regex-2025.10.23-cp311-cp311-win_amd64.whl", hash = "sha256:59bba9f7125536f23fdab5deeea08da0c287a64c1d3acc1c7e99515809824de8", size = 277600, upload-time = "2025-10-21T15:55:15.087Z" }, + { url = "https://files.pythonhosted.org/packages/1d/1b/c019167b1f7a8ec77251457e3ff0339ed74ca8bce1ea13138dc98309c923/regex-2025.10.23-cp311-cp311-win_arm64.whl", hash = "sha256:b103a752b6f1632ca420225718d6ed83f6a6ced3016dd0a4ab9a6825312de566", size = 269974, 
upload-time = "2025-10-21T15:55:16.841Z" }, + { url = "https://files.pythonhosted.org/packages/f6/57/eeb274d83ab189d02d778851b1ac478477522a92b52edfa6e2ae9ff84679/regex-2025.10.23-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7a44d9c00f7a0a02d3b777429281376370f3d13d2c75ae74eb94e11ebcf4a7fc", size = 489187, upload-time = "2025-10-21T15:55:18.322Z" }, + { url = "https://files.pythonhosted.org/packages/55/5c/7dad43a9b6ea88bf77e0b8b7729a4c36978e1043165034212fd2702880c6/regex-2025.10.23-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b83601f84fde939ae3478bb32a3aef36f61b58c3208d825c7e8ce1a735f143f2", size = 291122, upload-time = "2025-10-21T15:55:20.2Z" }, + { url = "https://files.pythonhosted.org/packages/66/21/38b71e6f2818f0f4b281c8fba8d9d57cfca7b032a648fa59696e0a54376a/regex-2025.10.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec13647907bb9d15fd192bbfe89ff06612e098a5709e7d6ecabbdd8f7908fc45", size = 288797, upload-time = "2025-10-21T15:55:21.932Z" }, + { url = "https://files.pythonhosted.org/packages/be/95/888f069c89e7729732a6d7cca37f76b44bfb53a1e35dda8a2c7b65c1b992/regex-2025.10.23-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78d76dd2957d62501084e7012ddafc5fcd406dd982b7a9ca1ea76e8eaaf73e7e", size = 798442, upload-time = "2025-10-21T15:55:23.747Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/4f903c608faf786627a8ee17c06e0067b5acade473678b69c8094b248705/regex-2025.10.23-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8668e5f067e31a47699ebb354f43aeb9c0ef136f915bd864243098524482ac43", size = 864039, upload-time = "2025-10-21T15:55:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/62/19/2df67b526bf25756c7f447dde554fc10a220fd839cc642f50857d01e4a7b/regex-2025.10.23-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a32433fe3deb4b2d8eda88790d2808fed0dc097e84f5e683b4cd4f42edef6cca", size = 912057, upload-time = "2025-10-21T15:55:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/99/14/9a39b7c9e007968411bc3c843cc14cf15437510c0a9991f080cab654fd16/regex-2025.10.23-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d97d73818c642c938db14c0668167f8d39520ca9d983604575ade3fda193afcc", size = 803374, upload-time = "2025-10-21T15:55:28.9Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f7/3495151dd3ca79949599b6d069b72a61a2c5e24fc441dccc79dcaf708fe6/regex-2025.10.23-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bca7feecc72ee33579e9f6ddf8babbe473045717a0e7dbc347099530f96e8b9a", size = 787714, upload-time = "2025-10-21T15:55:30.628Z" }, + { url = "https://files.pythonhosted.org/packages/28/65/ee882455e051131869957ee8597faea45188c9a98c0dad724cfb302d4580/regex-2025.10.23-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7e24af51e907d7457cc4a72691ec458320b9ae67dc492f63209f01eecb09de32", size = 858392, upload-time = "2025-10-21T15:55:32.322Z" }, + { url = "https://files.pythonhosted.org/packages/53/25/9287fef5be97529ebd3ac79d256159cb709a07eb58d4be780d1ca3885da8/regex-2025.10.23-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d10bcde58bbdf18146f3a69ec46dd03233b94a4a5632af97aa5378da3a47d288", size = 850484, upload-time = "2025-10-21T15:55:34.037Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b4/b49b88b4fea2f14dc73e5b5842755e782fc2e52f74423d6f4adc130d5880/regex-2025.10.23-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:44383bc0c933388516c2692c9a7503e1f4a67e982f20b9a29d2fb70c6494f147", size = 789634, upload-time = "2025-10-21T15:55:35.958Z" }, + { url = "https://files.pythonhosted.org/packages/b6/3c/2f8d199d0e84e78bcd6bdc2be9b62410624f6b796e2893d1837ae738b160/regex-2025.10.23-cp312-cp312-win32.whl", hash = "sha256:6040a86f95438a0114bba16e51dfe27f1bc004fd29fe725f54a586f6d522b079", size = 266060, upload-time = "2025-10-21T15:55:37.902Z" }, + { url = "https://files.pythonhosted.org/packages/d7/67/c35e80969f6ded306ad70b0698863310bdf36aca57ad792f45ddc0e2271f/regex-2025.10.23-cp312-cp312-win_amd64.whl", hash = "sha256:436b4c4352fe0762e3bfa34a5567079baa2ef22aa9c37cf4d128979ccfcad842", size = 276931, upload-time = "2025-10-21T15:55:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/f5/a1/4ed147de7d2b60174f758412c87fa51ada15cd3296a0ff047f4280aaa7ca/regex-2025.10.23-cp312-cp312-win_arm64.whl", hash = "sha256:f4b1b1991617055b46aff6f6db24888c1f05f4db9801349d23f09ed0714a9335", size = 270103, upload-time = "2025-10-21T15:55:41.24Z" }, + { url = "https://files.pythonhosted.org/packages/28/c6/195a6217a43719d5a6a12cc192a22d12c40290cecfa577f00f4fb822f07d/regex-2025.10.23-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b7690f95404a1293923a296981fd943cca12c31a41af9c21ba3edd06398fc193", size = 488956, upload-time = "2025-10-21T15:55:42.887Z" }, + { url = "https://files.pythonhosted.org/packages/4c/93/181070cd1aa2fa541ff2d3afcf763ceecd4937b34c615fa92765020a6c90/regex-2025.10.23-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1a32d77aeaea58a13230100dd8797ac1a84c457f3af2fdf0d81ea689d5a9105b", size = 290997, upload-time = "2025-10-21T15:55:44.53Z" }, + { url = "https://files.pythonhosted.org/packages/b6/c5/9d37fbe3a40ed8dda78c23e1263002497540c0d1522ed75482ef6c2000f0/regex-2025.10.23-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b24b29402f264f70a3c81f45974323b41764ff7159655360543b7cabb73e7d2f", size = 288686, upload-time = "2025-10-21T15:55:46.186Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e7/db610ff9f10c2921f9b6ac0c8d8be4681b28ddd40fc0549429366967e61f/regex-2025.10.23-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:563824a08c7c03d96856d84b46fdb3bbb7cfbdf79da7ef68725cda2ce169c72a", size = 798466, upload-time = "2025-10-21T15:55:48.24Z" }, + { url = "https://files.pythonhosted.org/packages/90/10/aab883e1fa7fe2feb15ac663026e70ca0ae1411efa0c7a4a0342d9545015/regex-2025.10.23-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0ec8bdd88d2e2659c3518087ee34b37e20bd169419ffead4240a7004e8ed03b", size = 863996, upload-time = "2025-10-21T15:55:50.478Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/8f686dd97a51f3b37d0238cd00a6d0f9ccabe701f05b56de1918571d0d61/regex-2025.10.23-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b577601bfe1d33913fcd9276d7607bbac827c4798d9e14d04bf37d417a6c41cb", size = 912145, upload-time = "2025-10-21T15:55:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ca/639f8cd5b08797bca38fc5e7e07f76641a428cf8c7fca05894caf045aa32/regex-2025.10.23-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c9f2c68ac6cb3de94eea08a437a75eaa2bd33f9e97c84836ca0b610a5804368", size = 803370, upload-time = "2025-10-21T15:55:53.944Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/1e/a40725bb76959eddf8abc42a967bed6f4851b39f5ac4f20e9794d7832aa5/regex-2025.10.23-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89f8b9ea3830c79468e26b0e21c3585f69f105157c2154a36f6b7839f8afb351", size = 787767, upload-time = "2025-10-21T15:55:56.004Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d8/8ee9858062936b0f99656dce390aa667c6e7fb0c357b1b9bf76fb5e2e708/regex-2025.10.23-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:98fd84c4e4ea185b3bb5bf065261ab45867d8875032f358a435647285c722673", size = 858335, upload-time = "2025-10-21T15:55:58.185Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0a/ed5faaa63fa8e3064ab670e08061fbf09e3a10235b19630cf0cbb9e48c0a/regex-2025.10.23-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1e11d3e5887b8b096f96b4154dfb902f29c723a9556639586cd140e77e28b313", size = 850402, upload-time = "2025-10-21T15:56:00.023Z" }, + { url = "https://files.pythonhosted.org/packages/79/14/d05f617342f4b2b4a23561da500ca2beab062bfcc408d60680e77ecaf04d/regex-2025.10.23-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f13450328a6634348d47a88367e06b64c9d84980ef6a748f717b13f8ce64e87", size = 789739, upload-time = "2025-10-21T15:56:01.967Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7b/e8ce8eef42a15f2c3461f8b3e6e924bbc86e9605cb534a393aadc8d3aff8/regex-2025.10.23-cp313-cp313-win32.whl", hash = "sha256:37be9296598a30c6a20236248cb8b2c07ffd54d095b75d3a2a2ee5babdc51df1", size = 266054, upload-time = "2025-10-21T15:56:05.291Z" }, + { url = "https://files.pythonhosted.org/packages/71/2d/55184ed6be6473187868d2f2e6a0708195fc58270e62a22cbf26028f2570/regex-2025.10.23-cp313-cp313-win_amd64.whl", hash = "sha256:ea7a3c283ce0f06fe789365841e9174ba05f8db16e2fd6ae00a02df9572c04c0", size = 276917, upload-time = "2025-10-21T15:56:07.303Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d4/927eced0e2bd45c45839e556f987f8c8f8683268dd3c00ad327deb3b0172/regex-2025.10.23-cp313-cp313-win_arm64.whl", hash = "sha256:d9a4953575f300a7bab71afa4cd4ac061c7697c89590a2902b536783eeb49a4f", size = 270105, upload-time = "2025-10-21T15:56:09.857Z" }, + { url = "https://files.pythonhosted.org/packages/3e/b3/95b310605285573341fc062d1d30b19a54f857530e86c805f942c4ff7941/regex-2025.10.23-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:7d6606524fa77b3912c9ef52a42ef63c6cfbfc1077e9dc6296cd5da0da286044", size = 491850, upload-time = "2025-10-21T15:56:11.685Z" }, + { url = "https://files.pythonhosted.org/packages/a4/8f/207c2cec01e34e56db1eff606eef46644a60cf1739ecd474627db90ad90b/regex-2025.10.23-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c037aadf4d64bdc38af7db3dbd34877a057ce6524eefcb2914d6d41c56f968cc", size = 292537, upload-time = "2025-10-21T15:56:13.963Z" }, + { url = "https://files.pythonhosted.org/packages/98/3b/025240af4ada1dc0b5f10d73f3e5122d04ce7f8908ab8881e5d82b9d61b6/regex-2025.10.23-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:99018c331fb2529084a0c9b4c713dfa49fafb47c7712422e49467c13a636c656", size = 290904, upload-time = "2025-10-21T15:56:16.016Z" }, + { url = "https://files.pythonhosted.org/packages/81/8e/104ac14e2d3450c43db18ec03e1b96b445a94ae510b60138f00ce2cb7ca1/regex-2025.10.23-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd8aba965604d70306eb90a35528f776e59112a7114a5162824d43b76fa27f58", size = 807311, upload-time = "2025-10-21T15:56:17.818Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/63/78aef90141b7ce0be8a18e1782f764f6997ad09de0e05251f0d2503a914a/regex-2025.10.23-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:238e67264b4013e74136c49f883734f68656adf8257bfa13b515626b31b20f8e", size = 873241, upload-time = "2025-10-21T15:56:19.941Z" }, + { url = "https://files.pythonhosted.org/packages/b3/a8/80eb1201bb49ae4dba68a1b284b4211ed9daa8e74dc600018a10a90399fb/regex-2025.10.23-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b2eb48bd9848d66fd04826382f5e8491ae633de3233a3d64d58ceb4ecfa2113a", size = 914794, upload-time = "2025-10-21T15:56:22.488Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d5/1984b6ee93281f360a119a5ca1af6a8ca7d8417861671388bf750becc29b/regex-2025.10.23-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d36591ce06d047d0c0fe2fc5f14bfbd5b4525d08a7b6a279379085e13f0e3d0e", size = 812581, upload-time = "2025-10-21T15:56:24.319Z" }, + { url = "https://files.pythonhosted.org/packages/c4/39/11ebdc6d9927172a64ae237d16763145db6bd45ebb4055c17b88edab72a7/regex-2025.10.23-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b5d4ece8628d6e364302006366cea3ee887db397faebacc5dacf8ef19e064cf8", size = 795346, upload-time = "2025-10-21T15:56:26.232Z" }, + { url = "https://files.pythonhosted.org/packages/3b/b4/89a591bcc08b5e436af43315284bd233ba77daf0cf20e098d7af12f006c1/regex-2025.10.23-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:39a7e8083959cb1c4ff74e483eecb5a65d3b3e1d821b256e54baf61782c906c6", size = 868214, upload-time = "2025-10-21T15:56:28.597Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ff/58ba98409c1dbc8316cdb20dafbc63ed267380a07780cafecaf5012dabc9/regex-2025.10.23-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:842d449a8fefe546f311656cf8c0d6729b08c09a185f1cad94c756210286d6a8", size = 854540, upload-time = "2025-10-21T15:56:30.875Z" }, + { url = "https://files.pythonhosted.org/packages/9a/f2/4a9e9338d67626e2071b643f828a482712ad15889d7268e11e9a63d6f7e9/regex-2025.10.23-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d614986dc68506be8f00474f4f6960e03e4ca9883f7df47744800e7d7c08a494", size = 799346, upload-time = "2025-10-21T15:56:32.725Z" }, + { url = "https://files.pythonhosted.org/packages/63/be/543d35c46bebf6f7bf2be538cca74d6585f25714700c36f37f01b92df551/regex-2025.10.23-cp313-cp313t-win32.whl", hash = "sha256:a5b7a26b51a9df473ec16a1934d117443a775ceb7b39b78670b2e21893c330c9", size = 268657, upload-time = "2025-10-21T15:56:34.577Z" }, + { url = "https://files.pythonhosted.org/packages/14/9f/4dd6b7b612037158bb2c9bcaa710e6fb3c40ad54af441b9c53b3a137a9f1/regex-2025.10.23-cp313-cp313t-win_amd64.whl", hash = "sha256:ce81c5544a5453f61cb6f548ed358cfb111e3b23f3cd42d250a4077a6be2a7b6", size = 280075, upload-time = "2025-10-21T15:56:36.767Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/5bd0672aa65d38c8da6747c17c8b441bdb53d816c569e3261013af8e83cf/regex-2025.10.23-cp313-cp313t-win_arm64.whl", hash = "sha256:e9bf7f6699f490e4e43c44757aa179dab24d1960999c84ab5c3d5377714ed473", size = 271219, upload-time = "2025-10-21T15:56:39.033Z" }, + { url = "https://files.pythonhosted.org/packages/73/f6/0caf29fec943f201fbc8822879c99d31e59c1d51a983d9843ee5cf398539/regex-2025.10.23-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5b5cb5b6344c4c4c24b2dc87b0bfee78202b07ef7633385df70da7fcf6f7cec6", size = 488960, upload-time = 
"2025-10-21T15:56:40.849Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7d/ebb7085b8fa31c24ce0355107cea2b92229d9050552a01c5d291c42aecea/regex-2025.10.23-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a6ce7973384c37bdf0f371a843f95a6e6f4e1489e10e0cf57330198df72959c5", size = 290932, upload-time = "2025-10-21T15:56:42.875Z" }, + { url = "https://files.pythonhosted.org/packages/27/41/43906867287cbb5ca4cee671c3cc8081e15deef86a8189c3aad9ac9f6b4d/regex-2025.10.23-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2ee3663f2c334959016b56e3bd0dd187cbc73f948e3a3af14c3caaa0c3035d10", size = 288766, upload-time = "2025-10-21T15:56:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/ab/9e/ea66132776700fc77a39b1056e7a5f1308032fead94507e208dc6716b7cd/regex-2025.10.23-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2003cc82a579107e70d013482acce8ba773293f2db534fb532738395c557ff34", size = 798884, upload-time = "2025-10-21T15:56:47.178Z" }, + { url = "https://files.pythonhosted.org/packages/d5/99/aed1453687ab63819a443930770db972c5c8064421f0d9f5da9ad029f26b/regex-2025.10.23-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:182c452279365a93a9f45874f7f191ec1c51e1f1eb41bf2b16563f1a40c1da3a", size = 864768, upload-time = "2025-10-21T15:56:49.793Z" }, + { url = "https://files.pythonhosted.org/packages/99/5d/732fe747a1304805eb3853ce6337eea16b169f7105a0d0dd9c6a5ffa9948/regex-2025.10.23-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b1249e9ff581c5b658c8f0437f883b01f1edcf424a16388591e7c05e5e9e8b0c", size = 911394, upload-time = "2025-10-21T15:56:52.186Z" }, + { url = "https://files.pythonhosted.org/packages/5e/48/58a1f6623466522352a6efa153b9a3714fc559d9f930e9bc947b4a88a2c3/regex-2025.10.23-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b841698f93db3ccc36caa1900d2a3be281d9539b822dc012f08fc80b46a3224", size = 803145, upload-time = "2025-10-21T15:56:55.142Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f6/7dea79be2681a5574ab3fc237aa53b2c1dfd6bd2b44d4640b6c76f33f4c1/regex-2025.10.23-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:956d89e0c92d471e8f7eee73f73fdff5ed345886378c45a43175a77538a1ffe4", size = 787831, upload-time = "2025-10-21T15:56:57.203Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ad/07b76950fbbe65f88120ca2d8d845047c401450f607c99ed38862904671d/regex-2025.10.23-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5c259cb363299a0d90d63b5c0d7568ee98419861618a95ee9d91a41cb9954462", size = 859162, upload-time = "2025-10-21T15:56:59.195Z" }, + { url = "https://files.pythonhosted.org/packages/41/87/374f3b2021b22aa6a4fc0b750d63f9721e53d1631a238f7a1c343c1cd288/regex-2025.10.23-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:185d2b18c062820b3a40d8fefa223a83f10b20a674bf6e8c4a432e8dfd844627", size = 849899, upload-time = "2025-10-21T15:57:01.747Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/7f7bb17c5a5a9747249807210e348450dab9212a46ae6d23ebce86ba6a2b/regex-2025.10.23-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:281d87fa790049c2b7c1b4253121edd80b392b19b5a3d28dc2a77579cb2a58ec", size = 789372, upload-time = "2025-10-21T15:57:04.018Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/9c7728ff544fea09bbc8635e4c9e7c423b11c24f1a7a14e6ac4831466709/regex-2025.10.23-cp314-cp314-win32.whl", hash = 
"sha256:63b81eef3656072e4ca87c58084c7a9c2b81d41a300b157be635a8a675aacfb8", size = 271451, upload-time = "2025-10-21T15:57:06.266Z" }, + { url = "https://files.pythonhosted.org/packages/48/f8/ef7837ff858eb74079c4804c10b0403c0b740762e6eedba41062225f7117/regex-2025.10.23-cp314-cp314-win_amd64.whl", hash = "sha256:0967c5b86f274800a34a4ed862dfab56928144d03cb18821c5153f8777947796", size = 280173, upload-time = "2025-10-21T15:57:08.206Z" }, + { url = "https://files.pythonhosted.org/packages/8e/d0/d576e1dbd9885bfcd83d0e90762beea48d9373a6f7ed39170f44ed22e336/regex-2025.10.23-cp314-cp314-win_arm64.whl", hash = "sha256:c70dfe58b0a00b36aa04cdb0f798bf3e0adc31747641f69e191109fd8572c9a9", size = 273206, upload-time = "2025-10-21T15:57:10.367Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d0/2025268315e8b2b7b660039824cb7765a41623e97d4cd421510925400487/regex-2025.10.23-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1f5799ea1787aa6de6c150377d11afad39a38afd033f0c5247aecb997978c422", size = 491854, upload-time = "2025-10-21T15:57:12.526Z" }, + { url = "https://files.pythonhosted.org/packages/44/35/5681c2fec5e8b33454390af209c4353dfc44606bf06d714b0b8bd0454ffe/regex-2025.10.23-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a9639ab7540cfea45ef57d16dcbea2e22de351998d614c3ad2f9778fa3bdd788", size = 292542, upload-time = "2025-10-21T15:57:15.158Z" }, + { url = "https://files.pythonhosted.org/packages/5d/17/184eed05543b724132e4a18149e900f5189001fcfe2d64edaae4fbaf36b4/regex-2025.10.23-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:08f52122c352eb44c3421dab78b9b73a8a77a282cc8314ae576fcaa92b780d10", size = 290903, upload-time = "2025-10-21T15:57:17.108Z" }, + { url = "https://files.pythonhosted.org/packages/25/d0/5e3347aa0db0de382dddfa133a7b0ae72f24b4344f3989398980b44a3924/regex-2025.10.23-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebf1baebef1c4088ad5a5623decec6b52950f0e4d7a0ae4d48f0a99f8c9cb7d7", size = 807546, upload-time = "2025-10-21T15:57:19.179Z" }, + { url = "https://files.pythonhosted.org/packages/d2/bb/40c589bbdce1be0c55e9f8159789d58d47a22014f2f820cf2b517a5cd193/regex-2025.10.23-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:16b0f1c2e2d566c562d5c384c2b492646be0a19798532fdc1fdedacc66e3223f", size = 873322, upload-time = "2025-10-21T15:57:21.36Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/a7e40c01575ac93360e606278d359f91829781a9f7fb6e5aa435039edbda/regex-2025.10.23-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7ada5d9dceafaab92646aa00c10a9efd9b09942dd9b0d7c5a4b73db92cc7e61", size = 914855, upload-time = "2025-10-21T15:57:24.044Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4b/d55587b192763db3163c3f508b3b67b31bb6f5e7a0e08b83013d0a59500a/regex-2025.10.23-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a36b4005770044bf08edecc798f0e41a75795b9e7c9c12fe29da8d792ef870c", size = 812724, upload-time = "2025-10-21T15:57:26.123Z" }, + { url = "https://files.pythonhosted.org/packages/33/20/18bac334955fbe99d17229f4f8e98d05e4a501ac03a442be8facbb37c304/regex-2025.10.23-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:af7b2661dcc032da1fae82069b5ebf2ac1dfcd5359ef8b35e1367bfc92181432", size = 795439, upload-time = "2025-10-21T15:57:28.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/46/c57266be9df8549c7d85deb4cb82280cb0019e46fff677534c5fa1badfa4/regex-2025.10.23-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:1cb976810ac1416a67562c2e5ba0accf6f928932320fef302e08100ed681b38e", size = 868336, upload-time = "2025-10-21T15:57:30.867Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f3/bd5879e41ef8187fec5e678e94b526a93f99e7bbe0437b0f2b47f9101694/regex-2025.10.23-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:1a56a54be3897d62f54290190fbcd754bff6932934529fbf5b29933da28fcd43", size = 854567, upload-time = "2025-10-21T15:57:33.062Z" }, + { url = "https://files.pythonhosted.org/packages/e6/57/2b6bbdbd2f24dfed5b028033aa17ad8f7d86bb28f1a892cac8b3bc89d059/regex-2025.10.23-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8f3e6d202fb52c2153f532043bbcf618fd177df47b0b306741eb9b60ba96edc3", size = 799565, upload-time = "2025-10-21T15:57:35.153Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ba/a6168f542ba73b151ed81237adf6b869c7b2f7f8d51618111296674e20ee/regex-2025.10.23-cp314-cp314t-win32.whl", hash = "sha256:1fa1186966b2621b1769fd467c7b22e317e6ba2d2cdcecc42ea3089ef04a8521", size = 274428, upload-time = "2025-10-21T15:57:37.996Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/c84475e14a2829e9b0864ebf77c3f7da909df9d8acfe2bb540ff0072047c/regex-2025.10.23-cp314-cp314t-win_amd64.whl", hash = "sha256:08a15d40ce28362eac3e78e83d75475147869c1ff86bc93285f43b4f4431a741", size = 284140, upload-time = "2025-10-21T15:57:40.027Z" }, + { url = "https://files.pythonhosted.org/packages/51/33/6a08ade0eee5b8ba79386869fa6f77afeb835b60510f3525db987e2fffc4/regex-2025.10.23-cp314-cp314t-win_arm64.whl", hash = "sha256:a93e97338e1c8ea2649e130dcfbe8cd69bba5e1e163834752ab64dcb4de6d5ed", size = 274497, upload-time = "2025-10-21T15:57:42.389Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, +] + +[[package]] +name = "ruff" +version = "0.12.12" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" }, + { url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" }, + { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" }, + { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" }, + { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" }, + { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" }, + { url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = 
"2025-09-04T16:49:49.931Z" }, + { url = "https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" }, + { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" }, + { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" }, + { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" }, + { url = "https://files.pythonhosted.org/packages/c8/fa/05b6428a008e60f79546c943e54068316f32ec8ab5c4f73e4563934fbdc7/ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1", size = 12052870, upload-time = "2025-09-04T16:50:09.121Z" }, + { url = "https://files.pythonhosted.org/packages/85/60/d1e335417804df452589271818749d061b22772b87efda88354cf35cdb7a/ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d", size = 13178016, upload-time = "2025-09-04T16:50:12.559Z" }, + { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "syrupy" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8c/f8/022d8704a3314f3e96dbd6bbd16ebe119ce30e35f41aabfa92345652fceb/syrupy-4.9.1.tar.gz", hash = "sha256:b7d0fcadad80a7d2f6c4c71917918e8ebe2483e8c703dfc8d49cdbb01081f9a4", size = 
52492, upload-time = "2025-03-24T01:36:37.225Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/9d/aef9ec5fd5a4ee2f6a96032c4eda5888c5c7cec65cef6b28c4fc37671d88/syrupy-4.9.1-py3-none-any.whl", hash = "sha256:b94cc12ed0e5e75b448255430af642516842a2374a46936dd2650cfb6dd20eda", size = 52214, upload-time = "2025-03-24T01:36:35.278Z" }, +] + +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, + { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, + { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, +] + +[[package]] +name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { 
url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, 
upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + +[[package]] +name = "types-toml" +version = "0.10.8.20240310" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/47/3e4c75042792bff8e90d7991aa5c51812cc668828cc6cce711e97f63a607/types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331", size = 4392, upload-time = "2024-03-10T02:18:37.518Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/a2/d32ab58c0b216912638b140ab2170ee4b8644067c293b170e19fba340ccc/types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d", size = 4777, upload-time = "2024-03-10T02:18:36.568Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "watchdog" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = 
"sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/56/90994d789c61df619bfc5ce2ecdabd5eeff564e1eb47512bd01b5e019569/watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26", size = 96390, upload-time = "2024-11-01T14:06:24.793Z" }, + { url = "https://files.pythonhosted.org/packages/55/46/9a67ee697342ddf3c6daa97e3a587a56d6c4052f881ed926a849fcf7371c/watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112", size = 88389, upload-time = "2024-11-01T14:06:27.112Z" }, + { url = "https://files.pythonhosted.org/packages/44/65/91b0985747c52064d8701e1075eb96f8c40a79df889e59a399453adfb882/watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3", size = 89020, upload-time = "2024-11-01T14:06:29.876Z" }, + { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, + { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, + { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, + { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, + { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, + { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = 
"2024-11-01T14:06:45.084Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, + { url = "https://files.pythonhosted.org/packages/30/ad/d17b5d42e28a8b91f8ed01cb949da092827afb9995d4559fd448d0472763/watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881", size = 87902, upload-time = "2024-11-01T14:06:53.119Z" }, + { url = "https://files.pythonhosted.org/packages/5c/ca/c3649991d140ff6ab67bfc85ab42b165ead119c9e12211e08089d763ece5/watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11", size = 88380, upload-time = "2024-11-01T14:06:55.19Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, +] + +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = "2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = "https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = "2025-10-02T14:34:11.618Z" }, + { url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time 
= "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = 
"sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + +[[package]] +name = "zstandard" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/7a/28efd1d371f1acd037ac64ed1c5e2b41514a6cc937dd6ab6a13ab9f0702f/zstandard-0.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e59fdc271772f6686e01e1b3b74537259800f57e24280be3f29c8a0deb1904dd", size = 795256, upload-time = "2025-09-14T22:15:56.415Z" }, + { url = "https://files.pythonhosted.org/packages/96/34/ef34ef77f1ee38fc8e4f9775217a613b452916e633c4f1d98f31db52c4a5/zstandard-0.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4d441506e9b372386a5271c64125f72d5df6d2a8e8a2a45a0ae09b03cb781ef7", size = 640565, upload-time = "2025-09-14T22:15:58.177Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1b/4fdb2c12eb58f31f28c4d28e8dc36611dd7205df8452e63f52fb6261d13e/zstandard-0.25.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:ab85470ab54c2cb96e176f40342d9ed41e58ca5733be6a893b730e7af9c40550", size = 5345306, upload-time = "2025-09-14T22:16:00.165Z" }, + { url = "https://files.pythonhosted.org/packages/73/28/a44bdece01bca027b079f0e00be3b6bd89a4df180071da59a3dd7381665b/zstandard-0.25.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e05ab82ea7753354bb054b92e2f288afb750e6b439ff6ca78af52939ebbc476d", size = 5055561, upload-time = "2025-09-14T22:16:02.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/74/68341185a4f32b274e0fc3410d5ad0750497e1acc20bd0f5b5f64ce17785/zstandard-0.25.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:78228d8a6a1c177a96b94f7e2e8d012c55f9c760761980da16ae7546a15a8e9b", size = 5402214, upload-time = "2025-09-14T22:16:04.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/67/f92e64e748fd6aaffe01e2b75a083c0c4fd27abe1c8747fee4555fcee7dd/zstandard-0.25.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:2b6bd67528ee8b5c5f10255735abc21aa106931f0dbaf297c7be0c886353c3d0", size = 5449703, upload-time = "2025-09-14T22:16:06.312Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e5/6d36f92a197c3c17729a2125e29c169f460538a7d939a27eaaa6dcfcba8e/zstandard-0.25.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4b6d83057e713ff235a12e73916b6d356e3084fd3d14ced499d84240f3eecee0", size = 5556583, upload-time = "2025-09-14T22:16:08.457Z" }, + { url = "https://files.pythonhosted.org/packages/d7/83/41939e60d8d7ebfe2b747be022d0806953799140a702b90ffe214d557638/zstandard-0.25.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9174f4ed06f790a6869b41cba05b43eeb9a35f8993c4422ab853b705e8112bbd", size = 5045332, upload-time = "2025-09-14T22:16:10.444Z" }, + { url = "https://files.pythonhosted.org/packages/b3/87/d3ee185e3d1aa0133399893697ae91f221fda79deb61adbe998a7235c43f/zstandard-0.25.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25f8f3cd45087d089aef5ba3848cd9efe3ad41163d3400862fb42f81a3a46701", size = 5572283, upload-time = "2025-09-14T22:16:12.128Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1d/58635ae6104df96671076ac7d4ae7816838ce7debd94aecf83e30b7121b0/zstandard-0.25.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3756b3e9da9b83da1796f8809dd57cb024f838b9eeafde28f3cb472012797ac1", size = 4959754, upload-time = "2025-09-14T22:16:14.225Z" }, + { url = "https://files.pythonhosted.org/packages/75/d6/57e9cb0a9983e9a229dd8fd2e6e96593ef2aa82a3907188436f22b111ccd/zstandard-0.25.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:81dad8d145d8fd981b2962b686b2241d3a1ea07733e76a2f15435dfb7fb60150", size = 5266477, upload-time = "2025-09-14T22:16:16.343Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a9/ee891e5edf33a6ebce0a028726f0bbd8567effe20fe3d5808c42323e8542/zstandard-0.25.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a5a419712cf88862a45a23def0ae063686db3d324cec7edbe40509d1a79a0aab", size = 5440914, upload-time = "2025-09-14T22:16:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/58/08/a8522c28c08031a9521f27abc6f78dbdee7312a7463dd2cfc658b813323b/zstandard-0.25.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e7360eae90809efd19b886e59a09dad07da4ca9ba096752e61a2e03c8aca188e", size = 5819847, upload-time = "2025-09-14T22:16:20.559Z" }, + { url = "https://files.pythonhosted.org/packages/6f/11/4c91411805c3f7b6f31c60e78ce347ca48f6f16d552fc659af6ec3b73202/zstandard-0.25.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:75ffc32a569fb049499e63ce68c743155477610532da1eb38e7f24bf7cd29e74", size = 5363131, upload-time = "2025-09-14T22:16:22.206Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d6/8c4bd38a3b24c4c7676a7a3d8de85d6ee7a983602a734b9f9cdefb04a5d6/zstandard-0.25.0-cp310-cp310-win32.whl", hash = "sha256:106281ae350e494f4ac8a80470e66d1fe27e497052c8d9c3b95dc4cf1ade81aa", size = 436469, upload-time = "2025-09-14T22:16:25.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/90/96d50ad417a8ace5f841b3228e93d1bb13e6ad356737f42e2dde30d8bd68/zstandard-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea9d54cc3d8064260114a0bbf3479fc4a98b21dffc89b3459edd506b69262f6e", size = 506100, upload-time = "2025-09-14T22:16:23.569Z" }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, 
upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { 
url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, + { url = "https://files.pythonhosted.org/packages/35/0b/8df9c4ad06af91d39e94fa96cc010a24ac4ef1378d3efab9223cc8593d40/zstandard-0.25.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94", size = 795735, upload-time = "2025-09-14T22:17:26.042Z" }, + { url = "https://files.pythonhosted.org/packages/3f/06/9ae96a3e5dcfd119377ba33d4c42a7d89da1efabd5cb3e366b156c45ff4d/zstandard-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1", size = 640440, upload-time = "2025-09-14T22:17:27.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/14/933d27204c2bd404229c69f445862454dcc101cd69ef8c6068f15aaec12c/zstandard-0.25.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f", size = 5343070, upload-time = "2025-09-14T22:17:28.896Z" }, + { url = "https://files.pythonhosted.org/packages/6d/db/ddb11011826ed7db9d0e485d13df79b58586bfdec56e5c84a928a9a78c1c/zstandard-0.25.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea", size = 5063001, upload-time = "2025-09-14T22:17:31.044Z" }, + { url = "https://files.pythonhosted.org/packages/db/00/87466ea3f99599d02a5238498b87bf84a6348290c19571051839ca943777/zstandard-0.25.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e", size = 5394120, upload-time = "2025-09-14T22:17:32.711Z" }, + { url = "https://files.pythonhosted.org/packages/2b/95/fc5531d9c618a679a20ff6c29e2b3ef1d1f4ad66c5e161ae6ff847d102a9/zstandard-0.25.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551", size = 5451230, upload-time = "2025-09-14T22:17:34.41Z" }, + { url = "https://files.pythonhosted.org/packages/63/4b/e3678b4e776db00f9f7b2fe58e547e8928ef32727d7a1ff01dea010f3f13/zstandard-0.25.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a", size = 5547173, upload-time = "2025-09-14T22:17:36.084Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d5/ba05ed95c6b8ec30bd468dfeab20589f2cf709b5c940483e31d991f2ca58/zstandard-0.25.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611", size = 5046736, upload-time = "2025-09-14T22:17:37.891Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/870aa06b3a76c73eced65c044b92286a3c4e00554005ff51962deef28e28/zstandard-0.25.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3", size = 5576368, upload-time = 
"2025-09-14T22:17:40.206Z" }, + { url = "https://files.pythonhosted.org/packages/5d/35/398dc2ffc89d304d59bc12f0fdd931b4ce455bddf7038a0a67733a25f550/zstandard-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b", size = 4954022, upload-time = "2025-09-14T22:17:41.879Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5c/36ba1e5507d56d2213202ec2b05e8541734af5f2ce378c5d1ceaf4d88dc4/zstandard-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851", size = 5267889, upload-time = "2025-09-14T22:17:43.577Z" }, + { url = "https://files.pythonhosted.org/packages/70/e8/2ec6b6fb7358b2ec0113ae202647ca7c0e9d15b61c005ae5225ad0995df5/zstandard-0.25.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250", size = 5433952, upload-time = "2025-09-14T22:17:45.271Z" }, + { url = "https://files.pythonhosted.org/packages/7b/01/b5f4d4dbc59ef193e870495c6f1275f5b2928e01ff5a81fecb22a06e22fb/zstandard-0.25.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98", size = 5814054, upload-time = "2025-09-14T22:17:47.08Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/fbd822d5c6f427cf158316d012c5a12f233473c2f9c5fe5ab1ae5d21f3d8/zstandard-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf", size = 5360113, upload-time = "2025-09-14T22:17:48.893Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/69a553d2047f9a2c7347caa225bb3a63b6d7704ad74610cb7823baa08ed7/zstandard-0.25.0-cp313-cp313-win32.whl", hash = "sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09", size = 436936, upload-time = "2025-09-14T22:17:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/d9/82/b9c06c870f3bd8767c201f1edbdf9e8dc34be5b0fbc5682c4f80fe948475/zstandard-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5", size = 506232, upload-time = "2025-09-14T22:17:50.402Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/60c3c01243bb81d381c9916e2a6d9e149ab8627c0c7d7abb2d73384b3c0c/zstandard-0.25.0-cp313-cp313-win_arm64.whl", hash = "sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049", size = 462671, upload-time = "2025-09-14T22:17:51.533Z" }, + { url = "https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3", size = 795887, upload-time = "2025-09-14T22:17:54.198Z" }, + { url = "https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f", size = 640658, upload-time = "2025-09-14T22:17:55.423Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8b6b71b1ddd517f68ffb55e10834388d4f793c49c6b83effaaa05785b0b4/zstandard-0.25.0-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c", size = 5379849, upload-time = "2025-09-14T22:17:57.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/86/a48e56320d0a17189ab7a42645387334fba2200e904ee47fc5a26c1fd8ca/zstandard-0.25.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439", size = 5058095, upload-time = "2025-09-14T22:17:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ad/eb659984ee2c0a779f9d06dbfe45e2dc39d99ff40a319895df2d3d9a48e5/zstandard-0.25.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043", size = 5551751, upload-time = "2025-09-14T22:18:01.618Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/b637faea43677eb7bd42ab204dfb7053bd5c4582bfe6b1baefa80ac0c47b/zstandard-0.25.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859", size = 6364818, upload-time = "2025-09-14T22:18:03.769Z" }, + { url = "https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0", size = 5560402, upload-time = "2025-09-14T22:18:05.954Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ae/56523ae9c142f0c08efd5e868a6da613ae76614eca1305259c3bf6a0ed43/zstandard-0.25.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7", size = 4955108, upload-time = "2025-09-14T22:18:07.68Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/c899f2d6df0840d5e384cf4c4121458c72802e8bda19691f3b16619f51e9/zstandard-0.25.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2", size = 5269248, upload-time = "2025-09-14T22:18:09.753Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c0/59e912a531d91e1c192d3085fc0f6fb2852753c301a812d856d857ea03c6/zstandard-0.25.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344", size = 5430330, upload-time = "2025-09-14T22:18:11.966Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/7e31db1240de2df22a58e2ea9a93fc6e38cc29353e660c0272b6735d6669/zstandard-0.25.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c", size = 5811123, upload-time = "2025-09-14T22:18:13.907Z" }, + { url = "https://files.pythonhosted.org/packages/f6/49/fac46df5ad353d50535e118d6983069df68ca5908d4d65b8c466150a4ff1/zstandard-0.25.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088", size = 5359591, upload-time = "2025-09-14T22:18:16.465Z" }, + { url = "https://files.pythonhosted.org/packages/c2/38/f249a2050ad1eea0bb364046153942e34abba95dd5520af199aed86fbb49/zstandard-0.25.0-cp314-cp314-win32.whl", hash = "sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12", size = 444513, upload-time = "2025-09-14T22:18:20.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/43/241f9615bcf8ba8903b3f0432da069e857fc4fd1783bd26183db53c4804b/zstandard-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2", 
size = 516118, upload-time = "2025-09-14T22:18:17.849Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/ef/da163ce2450ed4febf6467d77ccb4cd52c4c30ab45624bad26ca0a27260c/zstandard-0.25.0-cp314-cp314-win_arm64.whl", hash = "sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d", size = 476940, upload-time = "2025-09-14T22:18:19.088Z" },
+]
diff --git a/libs/partners/README.md b/libs/partners/README.md
index 01ae4de3848..73ba9682239 100644
--- a/libs/partners/README.md
+++ b/libs/partners/README.md
@@ -1,3 +1,7 @@
 # FAQ
 
 Looking for an integration not listed here? Check out the [integrations documentation](https://docs.langchain.com/oss/python/integrations/providers) and the [note](../README.md) in the `libs/` README about third-party maintained packages.
+
+## Integration docs
+
+For full documentation, see the [primary](https://docs.langchain.com/oss/python/integrations/providers/overview) and [API reference](https://reference.langchain.com/python/integrations/) docs for integrations.
diff --git a/libs/partners/anthropic/Makefile b/libs/partners/anthropic/Makefile
index bcb58a772e2..6226e95756a 100644
--- a/libs/partners/anthropic/Makefile
+++ b/libs/partners/anthropic/Makefile
@@ -14,7 +14,7 @@ test tests:
 	uv run --group test pytest -vvv --disable-socket --allow-unix-socket $(TEST_FILE)
 
 integration_test integration_tests:
-	uv run --group test --group test_integration pytest -vvv --timeout 30 $(TEST_FILE)
+	uv run --group test --group test_integration pytest -n auto -vvv --timeout 30 $(TEST_FILE)
 
 test_watch:
 	uv run --group test ptw --snapshot-update --now . -- -vv $(TEST_FILE)
diff --git a/libs/partners/anthropic/README.md b/libs/partners/anthropic/README.md
index e844819d82a..3279b87b0f6 100644
--- a/libs/partners/anthropic/README.md
+++ b/libs/partners/anthropic/README.md
@@ -1,5 +1,22 @@
 # langchain-anthropic
 
+[![PyPI - Version](https://img.shields.io/pypi/v/langchain-anthropic?label=%20)](https://pypi.org/project/langchain-anthropic/#history)
+[![PyPI - License](https://img.shields.io/pypi/l/langchain-anthropic)](https://opensource.org/licenses/MIT)
+[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-anthropic)](https://pypistats.org/packages/langchain-anthropic)
+[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai)
+
+Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs).
+
+## Quick Install
+
+```bash
+pip install langchain-anthropic
+```
+
+## πŸ€” What is this?
+
 This package contains the LangChain integration for Anthropic's generative models.
 
+## πŸ“– Documentation
+
 View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/anthropic) for more details.
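As a quick smoke test to pair with the Quick Install section above, a minimal usage sketch looks like the following. This is illustrative and not part of the patch: it assumes `ANTHROPIC_API_KEY` is exported (per the Setup note in the `ChatAnthropic` docstring below), and the model id simply mirrors the docstring examples later in this patch.

```python
# Minimal quick-start sketch (assumes ANTHROPIC_API_KEY is set in the environment).
# The model id follows the docstring examples in this patch; any supported
# Claude model name can be substituted.
from langchain_anthropic import ChatAnthropic

model = ChatAnthropic(model="claude-sonnet-4-5-20250929")
response = model.invoke("Say hello in French.")
print(response.content)
```

`invoke` returns an `AIMessage`, so `response.content` holds the generated text.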
diff --git a/libs/partners/anthropic/langchain_anthropic/_compat.py b/libs/partners/anthropic/langchain_anthropic/_compat.py
index c86ceef3dc0..d16f5c77274 100644
--- a/libs/partners/anthropic/langchain_anthropic/_compat.py
+++ b/libs/partners/anthropic/langchain_anthropic/_compat.py
@@ -22,8 +22,7 @@ def _convert_annotation_from_v1(annotation: types.Annotation) -> dict[str, Any]:
     if "title" in annotation:
         out["title"] = annotation["title"]
     out["type"] = "web_search_result_location"
-    if "url" in annotation:
-        out["url"] = annotation["url"]
+    out["url"] = annotation.get("url")
 
     for key, value in annotation.get("extras", {}).items():
         if key not in out:
diff --git a/libs/partners/anthropic/langchain_anthropic/chat_models.py b/libs/partners/anthropic/langchain_anthropic/chat_models.py
index fb494fffcab..f93456d88dc 100644
--- a/libs/partners/anthropic/langchain_anthropic/chat_models.py
+++ b/libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -18,10 +18,7 @@ from langchain_core.callbacks import (
 )
 from langchain_core.exceptions import OutputParserException
 from langchain_core.language_models import LanguageModelInput
-from langchain_core.language_models.chat_models import (
-    BaseChatModel,
-    LangSmithParams,
-)
+from langchain_core.language_models.chat_models import BaseChatModel, LangSmithParams
 from langchain_core.messages import (
     AIMessage,
     AIMessageChunk,
@@ -63,13 +60,15 @@ _message_type_lookups = {
 
 
 _MODEL_DEFAULT_MAX_OUTPUT_TOKENS: Final[dict[str, int]] = {
-    "claude-opus-4-1": 32000,
-    "claude-opus-4": 32000,
-    "claude-sonnet-4": 64000,
-    "claude-3-7-sonnet": 64000,
-    "claude-3-5-sonnet": 8192,
-    "claude-3-5-haiku": 8192,
-    "claude-3-haiku": 4096,
+    # Listed old to new
+    "claude-3-haiku": 4096,  # Claude Haiku 3
+    "claude-3-5-haiku": 8192,  # Claude Haiku 3.5
+    "claude-3-7-sonnet": 64000,  # Claude Sonnet 3.7
+    "claude-sonnet-4": 64000,  # Claude Sonnet 4
+    "claude-opus-4": 32000,  # Claude Opus 4
+    "claude-opus-4-1": 32000,  # Claude Opus 4.1
+    "claude-sonnet-4-5": 64000,  # Claude Sonnet 4.5
+    "claude-haiku-4-5": 64000,  # Claude Haiku 4.5
 }
 
 _FALLBACK_MAX_OUTPUT_TOKENS: Final[int] = 4096
@@ -77,7 +76,7 @@ _FALLBACK_MAX_OUTPUT_TOKENS: Final[int] = 4096
 def _default_max_tokens_for(model: str | None) -> int:
     """Return the default max output tokens for an Anthropic model (with fallback).
 
-    Can find the Max Tokens limits here: https://docs.anthropic.com/en/docs/about-claude/models/overview#model-comparison-table
+    See the Claude docs for [Max Tokens limits](https://docs.claude.com/en/docs/about-claude/models/overview#model-comparison-table).
     """
     if not model:
         return _FALLBACK_MAX_OUTPUT_TOKENS
@@ -92,15 +91,18 @@ class AnthropicTool(TypedDict):
     """Anthropic tool definition."""
 
     name: str
+
     input_schema: dict[str, Any]
+
     description: NotRequired[str]
+
     cache_control: NotRequired[dict[str, str]]
 
 
 def _is_builtin_tool(tool: Any) -> bool:
     """Check if a tool is a built-in Anthropic tool.
 
-    https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/overview
+    [Claude docs for built-in tools](https://docs.claude.com/en/docs/agents-and-tools/tool-use/overview)
     """
     if not isinstance(tool, dict):
         return False
@@ -569,8 +571,8 @@ def _handle_anthropic_bad_request(e: anthropic.BadRequestError) -> None:
 class ChatAnthropic(BaseChatModel):
     """Anthropic chat models.
 
-    See [Anthropic's docs](https://docs.anthropic.com/en/docs/about-claude/models/overview) for a
-    list of the latest models.
+ See [Anthropic's docs](https://docs.claude.com/en/docs/about-claude/models/overview) + for a list of the latest models. Setup: Install `langchain-anthropic` and set environment variable `ANTHROPIC_API_KEY`. @@ -581,25 +583,25 @@ class ChatAnthropic(BaseChatModel): ``` Key init args β€” completion params: - model: str - Name of Anthropic model to use. e.g. `'claude-3-7-sonnet-20250219'`. - temperature: float + model: + Name of Anthropic model to use. e.g. `'claude-sonnet-4-5-20250929'`. + temperature: Sampling temperature. Ranges from `0.0` to `1.0`. - max_tokens: int + max_tokens: Max number of tokens to generate. Key init args β€” client params: - timeout: float | None + timeout: Timeout for requests. - anthropic_proxy: str | None + anthropic_proxy: Proxy to use for the Anthropic clients, will be used for every API call. If not passed in will be read from env var `ANTHROPIC_PROXY`. - max_retries: int + max_retries: Max number of retries if a request fails. - api_key: str | None + api_key: Anthropic API key. If not passed in will be read from env var `ANTHROPIC_API_KEY`. - base_url: str | None + base_url: Base URL for API requests. Only specify if using a proxy or service emulator. @@ -609,8 +611,8 @@ class ChatAnthropic(BaseChatModel): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( - model="claude-3-7-sonnet-20250219", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", temperature=0, max_tokens=1024, timeout=None, @@ -650,7 +652,7 @@ class ChatAnthropic(BaseChatModel): ), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` ```python @@ -658,7 +660,7 @@ class ChatAnthropic(BaseChatModel): content="J'aime la programmation.", response_metadata={ "id": "msg_01Trik66aiQ9Z1higrD5XFx3", - "model": "claude-3-7-sonnet-20250219", + "model": "claude-sonnet-4-5-20250929", "stop_reason": "end_turn", "stop_sequence": None, "usage": {"input_tokens": 25, "output_tokens": 11}, @@ -674,7 +676,7 @@ class ChatAnthropic(BaseChatModel): Stream: ```python - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.text, end="") ``` @@ -690,7 +692,7 @@ class ChatAnthropic(BaseChatModel): ``` ```python - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -703,13 +705,13 @@ class ChatAnthropic(BaseChatModel): Async: ```python - await llm.ainvoke(messages) + await model.ainvoke(messages) # stream: - # async for chunk in (await llm.astream(messages)) + # async for chunk in (await model.astream(messages)) # batch: - # await llm.abatch([messages]) + # await model.abatch([messages]) ``` ```python @@ -717,7 +719,7 @@ class ChatAnthropic(BaseChatModel): content="J'aime la programmation.", response_metadata={ "id": "msg_01Trik66aiQ9Z1higrD5XFx3", - "model": "claude-3-7-sonnet-20250219", + "model": "claude-sonnet-4-5-20250929", "stop_reason": "end_turn", "stop_sequence": None, "usage": {"input_tokens": 25, "output_tokens": 11}, @@ -748,8 +750,8 @@ class ChatAnthropic(BaseChatModel): location: str = Field(..., description="The city and state, e.g. 
San Francisco, CA") - llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) - ai_msg = llm_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?") + model_with_tools = model.bind_tools([GetWeather, GetPopulation]) + ai_msg = model_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?") ai_msg.tool_calls ``` @@ -795,8 +797,8 @@ class ChatAnthropic(BaseChatModel): rating: int | None = Field(description="How funny the joke is, from 1 to 10") - structured_llm = llm.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats") + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats") ``` ```python @@ -810,7 +812,7 @@ class ChatAnthropic(BaseChatModel): See `ChatAnthropic.with_structured_output()` for more. Image input: - See [multimodal guides](https://python.langchain.com/docs/how_to/multimodal_inputs/) + See [multimodal guides](https://docs.langchain.com/oss/python/langchain/models#multimodal) for more detail. ```python @@ -823,7 +825,7 @@ class ChatAnthropic(BaseChatModel): image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" image_data = base64.b64encode(httpx.get(image_url).content).decode("utf-8") - llm = ChatAnthropic(model="claude-3-5-sonnet-latest") + model = ChatAnthropic(model="claude-sonnet-4-5-20250929") message = HumanMessage( content=[ { @@ -841,7 +843,7 @@ class ChatAnthropic(BaseChatModel): }, ], ) - ai_msg = llm.invoke([message]) + ai_msg = model.invoke([message]) ai_msg.content ``` @@ -852,13 +854,13 @@ class ChatAnthropic(BaseChatModel): ??? note "Files API" You can also pass in files that are managed through Anthropic's - [Files API](https://docs.anthropic.com/en/docs/build-with-claude/files): + [Files API](https://docs.claude.com/en/docs/build-with-claude/files): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( - model="claude-sonnet-4-20250514", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", betas=["files-api-2025-04-14"], ) input_message = { @@ -874,11 +876,11 @@ class ChatAnthropic(BaseChatModel): }, ], } - llm.invoke([input_message]) + model.invoke([input_message]) ``` PDF input: - See [multimodal guides](https://python.langchain.com/docs/how_to/multimodal_inputs/) + See [multimodal guides](https://docs.langchain.com/oss/python/langchain/models#multimodal) for more detail. ```python @@ -890,8 +892,8 @@ class ChatAnthropic(BaseChatModel): url = "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf" data = b64encode(requests.get(url).content).decode() - llm = ChatAnthropic(model="claude-3-5-sonnet-latest") - ai_msg = llm.invoke( + model = ChatAnthropic(model="claude-sonnet-4-5-20250929") + ai_msg = model.invoke( [ HumanMessage( [ @@ -915,13 +917,13 @@ class ChatAnthropic(BaseChatModel): ??? 
note "Files API" You can also pass in files that are managed through Anthropic's - [Files API](https://docs.anthropic.com/en/docs/build-with-claude/files): + [Files API](https://docs.claude.com/en/docs/build-with-claude/files): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( - model="claude-sonnet-4-20250514", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", betas=["files-api-2025-04-14"], ) input_message = { @@ -937,16 +939,17 @@ class ChatAnthropic(BaseChatModel): }, ], } - llm.invoke([input_message]) + model.invoke([input_message]) ``` Extended thinking: - Claude 3.7 Sonnet supports an - [extended thinking](https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking) + Certain [Claude models](https://docs.claude.com/en/docs/build-with-claude/extended-thinking#supported-models) + support an [extended thinking](https://docs.claude.com/en/docs/build-with-claude/extended-thinking) feature, which will output the step-by-step reasoning process that led to its final answer. To use it, specify the `thinking` parameter when initializing `ChatAnthropic`. + It can also be passed in as a kwarg during invocation. You will need to specify a token budget to use this feature. See usage example: @@ -954,13 +957,13 @@ class ChatAnthropic(BaseChatModel): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( - model="claude-3-7-sonnet-latest", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", max_tokens=5000, thinking={"type": "enabled", "budget_tokens": 2000}, ) - response = llm.invoke("What is the cube root of 50.653?") + response = model.invoke("What is the cube root of 50.653?") response.content ``` @@ -975,19 +978,22 @@ class ChatAnthropic(BaseChatModel): ] ``` + !!! warning "Differences in thinking across model versions" + The Claude Messages API handles thinking differently across Claude Sonnet + 3.7 and Claude 4 models. Refer to [their docs](https://docs.claude.com/en/docs/build-with-claude/extended-thinking#differences-in-thinking-across-model-versions) + for more info. + Citations: - Anthropic supports a - [citations](https://docs.anthropic.com/en/docs/build-with-claude/citations) + Anthropic supports a [citations](https://docs.claude.com/en/docs/build-with-claude/citations) feature that lets Claude attach context to its answers based on source - documents supplied by the user. When - [document content blocks](https://docs.anthropic.com/en/docs/build-with-claude/citations#document-types) + documents supplied by the user. When [document content blocks](https://docs.claude.com/en/docs/build-with-claude/citations#document-types) with `"citations": {"enabled": True}` are included in a query, Claude may generate citations in its response. ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic(model="claude-3-5-haiku-latest") + model = ChatAnthropic(model="claude-3-5-haiku-20241022") messages = [ { @@ -1008,7 +1014,7 @@ class ChatAnthropic(BaseChatModel): ], } ] - response = llm.invoke(messages) + response = model.invoke(messages) response.content ``` @@ -1050,7 +1056,7 @@ class ChatAnthropic(BaseChatModel): Token usage: ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.usage_metadata ``` @@ -1062,7 +1068,7 @@ class ChatAnthropic(BaseChatModel): default: ```python - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -1082,13 +1088,13 @@ class ChatAnthropic(BaseChatModel): !!! 
note Only certain models support prompt caching. - See the [Claude documentation](https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching#supported-models) + See the [Claude documentation](https://docs.claude.com/en/docs/build-with-claude/prompt-caching#supported-models) for a full list. ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic(model="claude-3-7-sonnet-20250219") + model = ChatAnthropic(model="claude-sonnet-4-5-20250929") messages = [ { @@ -1111,7 +1117,7 @@ class ChatAnthropic(BaseChatModel): }, ] - response = llm.invoke(messages) + response = model.invoke(messages) response.usage_metadata["input_token_details"] ``` @@ -1125,7 +1131,7 @@ class ChatAnthropic(BaseChatModel): cache. ```python - response = llm.invoke( + response = model.invoke( messages, cache_control={"type": "ephemeral"}, ) @@ -1137,8 +1143,8 @@ class ChatAnthropic(BaseChatModel): apply one hour caching by setting `ttl` to `'1h'`. ```python - llm = ChatAnthropic( - model="claude-3-7-sonnet-20250219", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", ) messages = [ @@ -1154,14 +1160,14 @@ class ChatAnthropic(BaseChatModel): } ] - response = llm.invoke(messages) + response = model.invoke(messages) ``` Details of cached token counts will be included on the `InputTokenDetails` of response's `usage_metadata`: ```python - response = llm.invoke(messages) + response = model.invoke(messages) response.usage_metadata ``` @@ -1179,18 +1185,19 @@ class ChatAnthropic(BaseChatModel): } ``` - See [Claude documentation](https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching#1-hour-cache-duration-beta) + See [Claude documentation](https://docs.claude.com/en/docs/build-with-claude/prompt-caching#1-hour-cache-duration-beta) for detail. - Extended context windows (beta): + !!! note title="Extended context windows (beta)" + Claude Sonnet 4 supports a 1-million token context window, available in beta for organizations in usage tier 4 and organizations with custom rate limits. ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( - model="claude-sonnet-4-20250514", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", betas=["context-1m-2025-08-07"], # Enable 1M context beta ) @@ -1210,23 +1217,24 @@ class ChatAnthropic(BaseChatModel): \"\"\") ] - response = llm.invoke(messages) + response = model.invoke(messages) ``` - See [Claude documentation](https://docs.anthropic.com/en/docs/build-with-claude/context-windows#1m-token-context-window) + See [Claude documentation](https://docs.claude.com/en/docs/build-with-claude/context-windows#1m-token-context-window) for detail. - Token-efficient tool use (beta): - See LangChain [docs](https://python.langchain.com/docs/integrations/chat/anthropic/) + !!! note title="Token-efficient tool use (beta)" + + See LangChain [docs](https://docs.langchain.com/oss/python/integrations/chat/anthropic) for more detail. ```python from langchain_anthropic import ChatAnthropic from langchain_core.tools import tool - llm = ChatAnthropic( - model="claude-3-7-sonnet-20250219", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", temperature=0, model_kwargs={ "extra_headers": { @@ -1240,8 +1248,8 @@ class ChatAnthropic(BaseChatModel): \"\"\"Get the weather at a location.\"\"\" return "It's sunny." - llm_with_tools = llm.bind_tools([get_weather]) - response = llm_with_tools.invoke( + model_with_tools = model.bind_tools([get_weather]) + response = model_with_tools.invoke( "What's the weather in San Francisco?" 
) print(response.tool_calls) @@ -1253,7 +1261,8 @@ class ChatAnthropic(BaseChatModel): Total tokens: 408 ``` - Context management: + !!! note title="Context management" + Anthropic supports a context editing feature that will automatically manage the model's context window (e.g., by clearing tool results). @@ -1263,17 +1272,18 @@ class ChatAnthropic(BaseChatModel): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( + model = ChatAnthropic( model="claude-sonnet-4-5-20250929", betas=["context-management-2025-06-27"], context_management={"edits": [{"type": "clear_tool_uses_20250919"}]}, ) - llm_with_tools = llm.bind_tools([{"type": "web_search_20250305", "name": "web_search"}]) - response = llm_with_tools.invoke("Search for recent developments in AI") + model_with_tools = model.bind_tools([{"type": "web_search_20250305", "name": "web_search"}]) + response = model_with_tools.invoke("Search for recent developments in AI") ``` - Built-in tools: - See LangChain [docs](https://python.langchain.com/docs/integrations/chat/anthropic/#built-in-tools) + !!! note title="Built-in tools" + + See LangChain [docs](https://docs.langchain.com/oss/python/integrations/chat/anthropic#built-in-tools) for more detail. ??? note "Web search" @@ -1281,16 +1291,16 @@ class ChatAnthropic(BaseChatModel): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic(model="claude-3-5-haiku-latest") + model = ChatAnthropic(model="claude-3-5-haiku-20241022") tool = { "type": "web_search_20250305", "name": "web_search", "max_uses": 3, } - llm_with_tools = llm.bind_tools([tool]) + model_with_tools = model.bind_tools([tool]) - response = llm_with_tools.invoke("How do I update a web app to TypeScript 5.5?") + response = model_with_tools.invoke("How do I update a web app to TypeScript 5.5?") ``` ??? note "Web fetch (beta)" @@ -1298,8 +1308,8 @@ class ChatAnthropic(BaseChatModel): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( - model="claude-3-5-haiku-latest", + model = ChatAnthropic( + model="claude-3-5-haiku-20241022", betas=["web-fetch-2025-09-10"], # Enable web fetch beta ) @@ -1308,23 +1318,23 @@ class ChatAnthropic(BaseChatModel): "name": "web_fetch", "max_uses": 3, } - llm_with_tools = llm.bind_tools([tool]) + model_with_tools = model.bind_tools([tool]) - response = llm_with_tools.invoke("Please analyze the content at https://example.com/article") + response = model_with_tools.invoke("Please analyze the content at https://example.com/article") ``` ??? note "Code execution" ```python - llm = ChatAnthropic( - model="claude-sonnet-4-20250514", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", betas=["code-execution-2025-05-22"], ) tool = {"type": "code_execution_20250522", "name": "code_execution"} - llm_with_tools = llm.bind_tools([tool]) + model_with_tools = model.bind_tools([tool]) - response = llm_with_tools.invoke( + response = model_with_tools.invoke( "Calculate the mean and standard deviation of [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]" ) ``` @@ -1347,13 +1357,13 @@ class ChatAnthropic(BaseChatModel): } ] - llm = ChatAnthropic( - model="claude-sonnet-4-20250514", + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", betas=["mcp-client-2025-04-04"], mcp_servers=mcp_servers, ) - response = llm.invoke( + response = model.invoke( "What transport protocols does the 2025-03-26 version of the MCP " "spec (modelcontextprotocol/modelcontextprotocol) support?" 
) @@ -1364,12 +1374,12 @@ class ChatAnthropic(BaseChatModel): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic(model="claude-3-7-sonnet-20250219") + model = ChatAnthropic(model="claude-sonnet-4-5-20250929") tool = {"type": "text_editor_20250124", "name": "str_replace_editor"} - llm_with_tools = llm.bind_tools([tool]) + model_with_tools = model.bind_tools([tool]) - response = llm_with_tools.invoke( + response = model_with_tools.invoke( "There's a syntax error in my primes.py file. Can you help me fix it?" ) print(response.text) @@ -1390,25 +1400,25 @@ class ChatAnthropic(BaseChatModel): ```python from langchain_anthropic import ChatAnthropic - llm = ChatAnthropic( + model = ChatAnthropic( model="claude-sonnet-4-5-20250929", betas=["context-management-2025-06-27"], ) - llm_with_tools = llm.bind_tools([{"type": "memory_20250818", "name": "memory"}]) - response = llm_with_tools.invoke("What are my interests?") + model_with_tools = model.bind_tools([{"type": "memory_20250818", "name": "memory"}]) + response = model_with_tools.invoke("What are my interests?") ``` - Response metadata + !!! note title="Response metadata" ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.response_metadata ``` ```python { "id": "msg_013xU6FHEGEq76aP4RgFerVT", - "model": "claude-3-7-sonnet-20250219", + "model": "claude-sonnet-4-5-20250929", "stop_reason": "end_turn", "stop_sequence": None, "usage": {"input_tokens": 25, "output_tokens": 11}, @@ -1530,7 +1540,11 @@ class ChatAnthropic(BaseChatModel): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object.""" + """Get the namespace of the LangChain object. + + Returns: + `["langchain", "chat_models", "anthropic"]` + """ return ["langchain", "chat_models", "anthropic"] @property @@ -1918,7 +1932,7 @@ class ChatAnthropic(BaseChatModel): parallel_tool_calls: Set to `False` to disable parallel tool use. Defaults to `None` (no specification, which allows parallel tool use). - !!! version-added "Added in version 0.3.2" + !!! version-added "Added in `langchain-anthropic` 0.3.2" kwargs: Any additional parameters are passed directly to `bind`. Example: @@ -1939,9 +1953,9 @@ class ChatAnthropic(BaseChatModel): product: str = Field(..., description="The product to look up.") - llm = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0) - llm_with_tools = llm.bind_tools([GetWeather, GetPrice]) - llm_with_tools.invoke( + model = ChatAnthropic(model="claude-sonnet-4-5-20250929", temperature=0) + model_with_tools = model.bind_tools([GetWeather, GetPrice]) + model_with_tools.invoke( "What is the weather like in San Francisco", ) # -> AIMessage( @@ -1949,7 +1963,7 @@ class ChatAnthropic(BaseChatModel): # {'text': '\nBased on the user\'s question, the relevant function to call is GetWeather, which requires the "location" parameter.\n\nThe user has directly specified the location as "San Francisco". 
Since San Francisco is a well known city, I can reasonably infer they mean San Francisco, CA without needing the state specified.\n\nAll the required parameters are provided, so I can proceed with the API call.\n', 'type': 'text'}, # {'text': None, 'type': 'tool_use', 'id': 'toolu_01SCgExKzQ7eqSkMHfygvYuu', 'name': 'GetWeather', 'input': {'location': 'San Francisco, CA'}} # ], - # response_metadata={'id': 'msg_01GM3zQtoFv8jGQMW7abLnhi', 'model': 'claude-3-5-sonnet-latest', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 487, 'output_tokens': 145}}, + # response_metadata={'id': 'msg_01GM3zQtoFv8jGQMW7abLnhi', 'model': 'claude-sonnet-4-5-20250929', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 487, 'output_tokens': 145}}, # id='run-87b1331e-9251-4a68-acef-f0a018b639cc-0' # ) ``` @@ -1973,163 +1987,163 @@ class ChatAnthropic(BaseChatModel): product: str = Field(..., description="The product to look up.") - llm = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0) - llm_with_tools = llm.bind_tools([GetWeather, GetPrice], tool_choice="any") - llm_with_tools.invoke( + model = ChatAnthropic(model="claude-sonnet-4-5-20250929", temperature=0) + model_with_tools = model.bind_tools([GetWeather, GetPrice], tool_choice="any") + model_with_tools.invoke( "what is the weather like in San Francisco", ) ``` - Example β€” force specific tool call with tool_choice `''`: + Example β€” force specific tool call with `tool_choice` `''`: - ```python - from langchain_anthropic import ChatAnthropic - from pydantic import BaseModel, Field + ```python + from langchain_anthropic import ChatAnthropic + from pydantic import BaseModel, Field - class GetWeather(BaseModel): - '''Get the current weather in a given location''' + class GetWeather(BaseModel): + '''Get the current weather in a given location''' - location: str = Field(..., description="The city and state, e.g. San Francisco, CA") + location: str = Field(..., description="The city and state, e.g. San Francisco, CA") - class GetPrice(BaseModel): - '''Get the price of a specific product.''' + class GetPrice(BaseModel): + '''Get the price of a specific product.''' - product: str = Field(..., description="The product to look up.") + product: str = Field(..., description="The product to look up.") - llm = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0) - llm_with_tools = llm.bind_tools([GetWeather, GetPrice], tool_choice="GetWeather") - llm_with_tools.invoke("What is the weather like in San Francisco") - ``` + model = ChatAnthropic(model="claude-sonnet-4-5-20250929", temperature=0) + model_with_tools = model.bind_tools([GetWeather, GetPrice], tool_choice="GetWeather") + model_with_tools.invoke("What is the weather like in San Francisco") + ``` Example β€” cache specific tools: - ```python - from langchain_anthropic import ChatAnthropic, convert_to_anthropic_tool - from pydantic import BaseModel, Field + ```python + from langchain_anthropic import ChatAnthropic, convert_to_anthropic_tool + from pydantic import BaseModel, Field - class GetWeather(BaseModel): - '''Get the current weather in a given location''' + class GetWeather(BaseModel): + '''Get the current weather in a given location''' - location: str = Field(..., description="The city and state, e.g. San Francisco, CA") + location: str = Field(..., description="The city and state, e.g. 
San Francisco, CA") - class GetPrice(BaseModel): - '''Get the price of a specific product.''' + class GetPrice(BaseModel): + '''Get the price of a specific product.''' - product: str = Field(..., description="The product to look up.") + product: str = Field(..., description="The product to look up.") - # We'll convert our pydantic class to the anthropic tool format - # before passing to bind_tools so that we can set the 'cache_control' - # field on our tool. - cached_price_tool = convert_to_anthropic_tool(GetPrice) - # Currently the only supported "cache_control" value is - # {"type": "ephemeral"}. - cached_price_tool["cache_control"] = {"type": "ephemeral"} + # We'll convert our pydantic class to the anthropic tool format + # before passing to bind_tools so that we can set the 'cache_control' + # field on our tool. + cached_price_tool = convert_to_anthropic_tool(GetPrice) + # Currently the only supported "cache_control" value is + # {"type": "ephemeral"}. + cached_price_tool["cache_control"] = {"type": "ephemeral"} - # We need to pass in extra headers to enable use of the beta cache - # control API. - llm = ChatAnthropic( - model="claude-3-5-sonnet-latest", - temperature=0, - ) - llm_with_tools = llm.bind_tools([GetWeather, cached_price_tool]) - llm_with_tools.invoke("What is the weather like in San Francisco") - ``` + # We need to pass in extra headers to enable use of the beta cache + # control API. + model = ChatAnthropic( + model="claude-sonnet-4-5-20250929", + temperature=0, + ) + model_with_tools = model.bind_tools([GetWeather, cached_price_tool]) + model_with_tools.invoke("What is the weather like in San Francisco") + ``` - This outputs: + This outputs: - ```python - AIMessage( - content=[ - { - "text": "Certainly! I can help you find out the current weather in San Francisco. To get this information, I'll use the GetWeather function. Let me fetch that data for you right away.", - "type": "text", - }, - { - "id": "toolu_01TS5h8LNo7p5imcG7yRiaUM", - "input": {"location": "San Francisco, CA"}, - "name": "GetWeather", - "type": "tool_use", - }, - ], - response_metadata={ - "id": "msg_01Xg7Wr5inFWgBxE5jH9rpRo", - "model": "claude-3-5-sonnet-latest", - "stop_reason": "tool_use", - "stop_sequence": None, - "usage": { - "input_tokens": 171, - "output_tokens": 96, - "cache_creation_input_tokens": 1470, - "cache_read_input_tokens": 0, - }, + ```python + AIMessage( + content=[ + { + "text": "Certainly! I can help you find out the current weather in San Francisco. To get this information, I'll use the GetWeather function. 
Let me fetch that data for you right away.", + "type": "text", }, - id="run-b36a5b54-5d69-470e-a1b0-b932d00b089e-0", - tool_calls=[ - { - "name": "GetWeather", - "args": {"location": "San Francisco, CA"}, - "id": "toolu_01TS5h8LNo7p5imcG7yRiaUM", - "type": "tool_call", - } - ], - usage_metadata={ + { + "id": "toolu_01TS5h8LNo7p5imcG7yRiaUM", + "input": {"location": "San Francisco, CA"}, + "name": "GetWeather", + "type": "tool_use", + }, + ], + response_metadata={ + "id": "msg_01Xg7Wr5inFWgBxE5jH9rpRo", + "model": "claude-sonnet-4-5-20250929", + "stop_reason": "tool_use", + "stop_sequence": None, + "usage": { "input_tokens": 171, "output_tokens": 96, - "total_tokens": 267, + "cache_creation_input_tokens": 1470, + "cache_read_input_tokens": 0, }, - ) - ``` + }, + id="run-b36a5b54-5d69-470e-a1b0-b932d00b089e-0", + tool_calls=[ + { + "name": "GetWeather", + "args": {"location": "San Francisco, CA"}, + "id": "toolu_01TS5h8LNo7p5imcG7yRiaUM", + "type": "tool_call", + } + ], + usage_metadata={ + "input_tokens": 171, + "output_tokens": 96, + "total_tokens": 267, + }, + ) + ``` - If we invoke the tool again, we can see that the "usage" information in the AIMessage.response_metadata shows that we had a cache hit: + If we invoke the tool again, we can see that the "usage" information in the AIMessage.response_metadata shows that we had a cache hit: - ```python - AIMessage( - content=[ - { - "text": "To get the current weather in San Francisco, I can use the GetWeather function. Let me check that for you.", - "type": "text", - }, - { - "id": "toolu_01HtVtY1qhMFdPprx42qU2eA", - "input": {"location": "San Francisco, CA"}, - "name": "GetWeather", - "type": "tool_use", - }, - ], - response_metadata={ - "id": "msg_016RfWHrRvW6DAGCdwB6Ac64", - "model": "claude-3-5-sonnet-latest", - "stop_reason": "tool_use", - "stop_sequence": None, - "usage": { - "input_tokens": 171, - "output_tokens": 82, - "cache_creation_input_tokens": 0, - "cache_read_input_tokens": 1470, - }, + ```python + AIMessage( + content=[ + { + "text": "To get the current weather in San Francisco, I can use the GetWeather function. Let me check that for you.", + "type": "text", }, - id="run-88b1f825-dcb7-4277-ac27-53df55d22001-0", - tool_calls=[ - { - "name": "GetWeather", - "args": {"location": "San Francisco, CA"}, - "id": "toolu_01HtVtY1qhMFdPprx42qU2eA", - "type": "tool_call", - } - ], - usage_metadata={ + { + "id": "toolu_01HtVtY1qhMFdPprx42qU2eA", + "input": {"location": "San Francisco, CA"}, + "name": "GetWeather", + "type": "tool_use", + }, + ], + response_metadata={ + "id": "msg_016RfWHrRvW6DAGCdwB6Ac64", + "model": "claude-sonnet-4-5-20250929", + "stop_reason": "tool_use", + "stop_sequence": None, + "usage": { "input_tokens": 171, "output_tokens": 82, - "total_tokens": 253, + "cache_creation_input_tokens": 0, + "cache_read_input_tokens": 1470, }, - ) - ``` + }, + id="run-88b1f825-dcb7-4277-ac27-53df55d22001-0", + tool_calls=[ + { + "name": "GetWeather", + "args": {"location": "San Francisco, CA"}, + "id": "toolu_01HtVtY1qhMFdPprx42qU2eA", + "type": "tool_call", + } + ], + usage_metadata={ + "input_tokens": 171, + "output_tokens": 82, + "total_tokens": 253, + }, + ) + ``` """ # noqa: E501 formatted_tools = [ tool if _is_builtin_tool(tool) else convert_to_anthropic_tool(tool) @@ -2178,121 +2192,125 @@ class ChatAnthropic(BaseChatModel): Args: schema: The output schema. Can be passed in as: - - an Anthropic tool schema, - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class, - - or a Pydantic class. 
+ - An Anthropic tool schema, + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `raw`, `parsed`, and `parsing_error`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. kwargs: Additional keyword arguments are ignored. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is `False` and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - Otherwise, if `include_raw` is `False` then Runnable outputs a dict. + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` - If `include_raw` is True, then Runnable outputs a dict with keys: + Example: Pydantic schema (`include_raw=False`): - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. 
- - `'parsing_error'`: BaseException | None - - Example: Pydantic schema (include_raw=False): - - ```python - from langchain_anthropic import ChatAnthropic - from pydantic import BaseModel + ```python + from langchain_anthropic import ChatAnthropic + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - llm = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatAnthropic(model="claude-sonnet-4-5-20250929", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> AnswerWithJustification( - # answer='They weigh the same', - # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' - # ) - ``` + # -> AnswerWithJustification( + # answer='They weigh the same', + # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' + # ) + ``` - Example: Pydantic schema (include_raw=True): + Example: Pydantic schema (`include_raw=True`): - ```python - from langchain_anthropic import ChatAnthropic - from pydantic import BaseModel + ```python + from langchain_anthropic import ChatAnthropic + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - llm = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification, include_raw=True) + model = ChatAnthropic(model="claude-sonnet-4-5-20250929", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification, include_raw=True) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> { - # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), - # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. 
The weight is the same, but the volume or density of the objects may differ.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") + # -> { + # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), + # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), + # 'parsing_error': None + # } + ``` - Example: Dict schema (include_raw=False): + Example: `dict` schema (`include_raw=False`): - ```python - from langchain_anthropic import ChatAnthropic + ```python + from langchain_anthropic import ChatAnthropic - schema = { - "name": "AnswerWithJustification", - "description": "An answer to the user question along with justification for the answer.", - "input_schema": { - "type": "object", - "properties": { - "answer": {"type": "string"}, - "justification": {"type": "string"}, - }, - "required": ["answer", "justification"], + schema = { + "name": "AnswerWithJustification", + "description": "An answer to the user question along with justification for the answer.", + "input_schema": { + "type": "object", + "properties": { + "answer": {"type": "string"}, + "justification": {"type": "string"}, }, - } - llm = ChatAnthropic(model="claude-3-5-sonnet-latest", temperature=0) - structured_llm = llm.with_structured_output(schema) - - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> { - # 'answer': 'They weigh the same', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' - # } - ``` - - !!! warning "Behavior changed in 0.1.22" - Added support for TypedDict class as `schema`. + "required": ["answer", "justification"], + }, + } + model = ChatAnthropic(model="claude-sonnet-4-5-20250929", temperature=0) + structured_model = model.with_structured_output(schema) + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") + # -> { + # 'answer': 'They weigh the same', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' + # } + ``` """ # noqa: E501 formatted_tool = convert_to_anthropic_tool(schema) tool_name = formatted_tool["name"] @@ -2345,60 +2363,60 @@ class ChatAnthropic(BaseChatModel): Args: messages: The message inputs to tokenize. - tools: If provided, sequence of dict, BaseModel, function, or BaseTools - to be converted to tool schemas. + tools: If provided, sequence of `dict`, `BaseModel`, function, or `BaseTool` + objects to be converted to tool schemas. kwargs: Additional keyword arguments are passed to the Anthropic `messages.count_tokens` method. 
Basic usage: - ```python - from langchain_anthropic import ChatAnthropic - from langchain_core.messages import HumanMessage, SystemMessage + ```python + from langchain_anthropic import ChatAnthropic + from langchain_core.messages import HumanMessage, SystemMessage - llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") + model = ChatAnthropic(model="claude-sonnet-4-5-20250929") - messages = [ - SystemMessage(content="You are a scientist"), - HumanMessage(content="Hello, Claude"), - ] - llm.get_num_tokens_from_messages(messages) - ``` + messages = [ + SystemMessage(content="You are a scientist"), + HumanMessage(content="Hello, Claude"), + ] + model.get_num_tokens_from_messages(messages) + ``` - ```txt - 14 - ``` + ```txt + 14 + ``` Pass tool schemas: - ```python - from langchain_anthropic import ChatAnthropic - from langchain_core.messages import HumanMessage - from langchain_core.tools import tool + ```python + from langchain_anthropic import ChatAnthropic + from langchain_core.messages import HumanMessage + from langchain_core.tools import tool - llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") + model = ChatAnthropic(model="claude-sonnet-4-5-20250929") - @tool(parse_docstring=True) - def get_weather(location: str) -> str: - \"\"\"Get the current weather in a given location + @tool(parse_docstring=True) + def get_weather(location: str) -> str: + \"\"\"Get the current weather in a given location - Args: - location: The city and state, e.g. San Francisco, CA - \"\"\" - return "Sunny" + Args: + location: The city and state, e.g. San Francisco, CA + \"\"\" + return "Sunny" - messages = [ - HumanMessage(content="What's the weather like in San Francisco?"), - ] - llm.get_num_tokens_from_messages(messages, tools=[get_weather]) - ``` + messages = [ + HumanMessage(content="What's the weather like in San Francisco?"), + ] + model.get_num_tokens_from_messages(messages, tools=[get_weather]) + ``` - ```txt - 403 - ``` + ```txt + 403 + ``` - !!! warning "Behavior changed in 0.3.0" - Uses Anthropic's [token counting API](https://docs.anthropic.com/en/docs/build-with-claude/token-counting) to count tokens in messages. + !!! warning "Behavior changed in `langchain-anthropic` 0.3.0" + Uses Anthropic's [token counting API](https://docs.claude.com/en/docs/build-with-claude/token-counting) to count tokens in messages. """ # noqa: D214,E501 formatted_system, formatted_messages = _format_messages(messages) @@ -2646,7 +2664,7 @@ def _make_message_chunk_from_anthropic_event( # Mark final Anthropic stream chunk message_chunk.chunk_position = "last" # Unhandled event types (e.g., `content_block_stop`, `ping` events) - # https://docs.anthropic.com/en/docs/build-with-claude/streaming#other-events + # https://docs.claude.com/en/docs/build-with-claude/streaming#other-events else: pass diff --git a/libs/partners/anthropic/langchain_anthropic/llms.py b/libs/partners/anthropic/langchain_anthropic/llms.py index 945d353fb85..d9f794cd2ac 100644 --- a/libs/partners/anthropic/langchain_anthropic/llms.py +++ b/libs/partners/anthropic/langchain_anthropic/llms.py @@ -1,4 +1,4 @@ -"""Anthropic LLM wrapper. Chat models are in chat_models.py.""" +"""Anthropic LLM wrapper. 
Chat models are in `chat_models.py`.""" from __future__ import annotations @@ -23,9 +23,11 @@ from typing_extensions import Self class _AnthropicCommon(BaseLanguageModel): - client: Any = None #: :meta private: - async_client: Any = None #: :meta private: - model: str = Field(default="claude-3-5-sonnet-latest", alias="model_name") + client: Any = None + + async_client: Any = None + + model: str = Field(default="claude-sonnet-4-5", alias="model_name") """Model name to use.""" max_tokens: int = Field(default=1024, alias="max_tokens_to_sample") @@ -70,8 +72,11 @@ class _AnthropicCommon(BaseLanguageModel): """Automatically read from env var `ANTHROPIC_API_KEY` if not provided.""" HUMAN_PROMPT: str | None = None + AI_PROMPT: str | None = None + count_tokens: Callable[[str], int] | None = None + model_kwargs: dict[str, Any] = Field(default_factory=dict) @model_validator(mode="before") @@ -127,7 +132,7 @@ class _AnthropicCommon(BaseLanguageModel): class AnthropicLLM(LLM, _AnthropicCommon): - """Anthropic large language model. + """Anthropic text completion large language model (legacy LLM). To use, you should have the environment variable `ANTHROPIC_API_KEY` set with your API key, or pass it as a named parameter to the constructor. @@ -136,7 +141,7 @@ class AnthropicLLM(LLM, _AnthropicCommon): ```python from langchain_anthropic import AnthropicLLM - model = AnthropicLLM() + model = AnthropicLLM(model="claude-sonnet-4-5") ``` """ diff --git a/libs/partners/anthropic/langchain_anthropic/middleware/__init__.py b/libs/partners/anthropic/langchain_anthropic/middleware/__init__.py new file mode 100644 index 00000000000..d1a34993c8d --- /dev/null +++ b/libs/partners/anthropic/langchain_anthropic/middleware/__init__.py @@ -0,0 +1,25 @@ +"""Middleware for Anthropic models.""" + +from langchain_anthropic.middleware.anthropic_tools import ( + FilesystemClaudeMemoryMiddleware, + FilesystemClaudeTextEditorMiddleware, + StateClaudeMemoryMiddleware, + StateClaudeTextEditorMiddleware, +) +from langchain_anthropic.middleware.bash import ClaudeBashToolMiddleware +from langchain_anthropic.middleware.file_search import ( + StateFileSearchMiddleware, +) +from langchain_anthropic.middleware.prompt_caching import ( + AnthropicPromptCachingMiddleware, +) + +__all__ = [ + "AnthropicPromptCachingMiddleware", + "ClaudeBashToolMiddleware", + "FilesystemClaudeMemoryMiddleware", + "FilesystemClaudeTextEditorMiddleware", + "StateClaudeMemoryMiddleware", + "StateClaudeTextEditorMiddleware", + "StateFileSearchMiddleware", +] diff --git a/libs/partners/anthropic/langchain_anthropic/middleware/anthropic_tools.py b/libs/partners/anthropic/langchain_anthropic/middleware/anthropic_tools.py new file mode 100644 index 00000000000..b72dd1a62c8 --- /dev/null +++ b/libs/partners/anthropic/langchain_anthropic/middleware/anthropic_tools.py @@ -0,0 +1,1184 @@ +"""Anthropic text editor and memory tool middleware. + +This module provides client-side implementations of Anthropic's text editor and +memory tools using schema-less tool definitions and tool call interception. 
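+
+A minimal usage sketch (mirrors the class-level examples below; assumes an agent
+built with `create_agent` and an already-configured chat `model` such as
+`ChatAnthropic`):
+
+```python
+from langchain.agents import create_agent
+
+from langchain_anthropic.middleware import StateClaudeTextEditorMiddleware
+
+agent = create_agent(
+    model=model,  # e.g. a configured ChatAnthropic instance
+    tools=[],
+    middleware=[StateClaudeTextEditorMiddleware()],
+)
+```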
+""" + +from __future__ import annotations + +import os +import shutil +from datetime import datetime, timezone +from pathlib import Path +from typing import TYPE_CHECKING, Annotated, Any, cast + +from langchain.agents.middleware.types import ( + AgentMiddleware, + AgentState, + ModelRequest, + ModelResponse, +) +from langchain_core.messages import ToolMessage +from langgraph.types import Command +from typing_extensions import NotRequired, TypedDict + +if TYPE_CHECKING: + from collections.abc import Awaitable, Callable, Sequence + + from langchain.agents.middleware.types import ToolCallRequest + +# Tool type constants +TEXT_EDITOR_TOOL_TYPE = "text_editor_20250728" +TEXT_EDITOR_TOOL_NAME = "str_replace_based_edit_tool" +MEMORY_TOOL_TYPE = "memory_20250818" +MEMORY_TOOL_NAME = "memory" + +MEMORY_SYSTEM_PROMPT = """IMPORTANT: ALWAYS VIEW YOUR MEMORY DIRECTORY BEFORE \ +DOING ANYTHING ELSE. +MEMORY PROTOCOL: +1. Use the `view` command of your `memory` tool to check for earlier progress. +2. ... (work on the task) ... + - As you make progress, record status / progress / thoughts etc in your memory. +ASSUME INTERRUPTION: Your context window might be reset at any moment, so you risk \ +losing any progress that is not recorded in your memory directory.""" + + +class FileData(TypedDict): + """Data structure for storing file contents.""" + + content: list[str] + """Lines of the file.""" + + created_at: str + """ISO 8601 timestamp of file creation.""" + + modified_at: str + """ISO 8601 timestamp of last modification.""" + + +def files_reducer( + left: dict[str, FileData] | None, right: dict[str, FileData | None] +) -> dict[str, FileData]: + """Custom reducer that merges file updates. + + Args: + left: Existing files dict. + right: New files dict to merge (None values delete files). + + Returns: + Merged dict where right overwrites left for matching keys. + """ + if left is None: + # Filter out None values when initializing + return {k: v for k, v in right.items() if v is not None} + + # Merge, filtering out None values (deletions) + result = {**left} + for k, v in right.items(): + if v is None: + result.pop(k, None) + else: + result[k] = v + return result + + +class AnthropicToolsState(AgentState): + """State schema for Anthropic text editor and memory tools.""" + + text_editor_files: NotRequired[Annotated[dict[str, FileData], files_reducer]] + """Virtual file system for text editor tools.""" + + memory_files: NotRequired[Annotated[dict[str, FileData], files_reducer]] + """Virtual file system for memory tools.""" + + +def _validate_path(path: str, *, allowed_prefixes: Sequence[str] | None = None) -> str: + """Validate and normalize file path for security. + + Args: + path: The path to validate. + allowed_prefixes: Optional list of allowed path prefixes. + + Returns: + Normalized canonical path. + + Raises: + ValueError: If path contains traversal sequences or violates prefix rules. + """ + # Reject paths with traversal attempts + if ".." in path or path.startswith("~"): + msg = f"Path traversal not allowed: {path}" + raise ValueError(msg) + + # Normalize path (resolve ., //, etc.) 
+ normalized = os.path.normpath(path) + + # Convert to forward slashes for consistency + normalized = normalized.replace("\\", "/") + + # Ensure path starts with / + if not normalized.startswith("/"): + normalized = f"/{normalized}" + + # Check allowed prefixes if specified + if allowed_prefixes is not None and not any( + normalized.startswith(prefix) for prefix in allowed_prefixes + ): + msg = f"Path must start with one of {allowed_prefixes}: {path}" + raise ValueError(msg) + + return normalized + + +def _list_directory(files: dict[str, FileData], path: str) -> list[str]: + """List files in a directory. + + Args: + files: Files dict. + path: Normalized directory path. + + Returns: + Sorted list of file paths in the directory. + """ + # Ensure path ends with / for directory matching + dir_path = path if path.endswith("/") else f"{path}/" + + matching_files = [] + for file_path in files: + if file_path.startswith(dir_path): + # Get relative path from directory + relative = file_path[len(dir_path) :] + # Only include direct children (no subdirectories) + if "/" not in relative: + matching_files.append(file_path) + + return sorted(matching_files) + + +class _StateClaudeFileToolMiddleware(AgentMiddleware): + """Base class for state-based file tool middleware (internal).""" + + state_schema = AnthropicToolsState + + def __init__( + self, + *, + tool_type: str, + tool_name: str, + state_key: str, + allowed_path_prefixes: Sequence[str] | None = None, + system_prompt: str | None = None, + ) -> None: + """Initialize the middleware. + + Args: + tool_type: Tool type identifier. + tool_name: Tool name. + state_key: State key for file storage. + allowed_path_prefixes: Optional list of allowed path prefixes. + system_prompt: Optional system prompt to inject. + """ + self.tool_type = tool_type + self.tool_name = tool_name + self.state_key = state_key + self.allowed_prefixes = allowed_path_prefixes + self.system_prompt = system_prompt + + def wrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelResponse: + """Inject tool and optional system prompt.""" + # Add tool + tools = list(request.tools or []) + tools.append( + { + "type": self.tool_type, + "name": self.tool_name, + } + ) + request.tools = tools + + # Inject system prompt if provided + if self.system_prompt: + request.system_prompt = ( + request.system_prompt + "\n\n" + self.system_prompt + if request.system_prompt + else self.system_prompt + ) + + return handler(request) + + async def awrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelResponse: + """Inject tool and optional system prompt (async version).""" + # Add tool + tools = list(request.tools or []) + tools.append( + { + "type": self.tool_type, + "name": self.tool_name, + } + ) + request.tools = tools + + # Inject system prompt if provided + if self.system_prompt: + request.system_prompt = ( + request.system_prompt + "\n\n" + self.system_prompt + if request.system_prompt + else self.system_prompt + ) + + return await handler(request) + + def wrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], ToolMessage | Command], + ) -> ToolMessage | Command: + """Intercept tool calls.""" + tool_call = request.tool_call + tool_name = tool_call.get("name") + + if tool_name != self.tool_name: + return handler(request) + + # Handle tool call + try: + args = tool_call.get("args", {}) + command = args.get("command") + state = request.state 
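+            # `state` holds the virtual file system under `self.state_key` (see
+            # `AnthropicToolsState`); each handler below returns a `Command` that
+            # appends a `ToolMessage`, and mutating commands also update that state.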
+ + if command == "view": + return self._handle_view(args, state, tool_call["id"]) + if command == "create": + return self._handle_create(args, state, tool_call["id"]) + if command == "str_replace": + return self._handle_str_replace(args, state, tool_call["id"]) + if command == "insert": + return self._handle_insert(args, state, tool_call["id"]) + if command == "delete": + return self._handle_delete(args, state, tool_call["id"]) + if command == "rename": + return self._handle_rename(args, state, tool_call["id"]) + + msg = f"Unknown command: {command}" + return ToolMessage( + content=msg, + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + except (ValueError, FileNotFoundError) as e: + return ToolMessage( + content=str(e), + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + """Intercept tool calls (async version).""" + tool_call = request.tool_call + tool_name = tool_call.get("name") + + if tool_name != self.tool_name: + return await handler(request) + + # Handle tool call + try: + args = tool_call.get("args", {}) + command = args.get("command") + state = request.state + + if command == "view": + return self._handle_view(args, state, tool_call["id"]) + if command == "create": + return self._handle_create(args, state, tool_call["id"]) + if command == "str_replace": + return self._handle_str_replace(args, state, tool_call["id"]) + if command == "insert": + return self._handle_insert(args, state, tool_call["id"]) + if command == "delete": + return self._handle_delete(args, state, tool_call["id"]) + if command == "rename": + return self._handle_rename(args, state, tool_call["id"]) + + msg = f"Unknown command: {command}" + return ToolMessage( + content=msg, + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + except (ValueError, FileNotFoundError) as e: + return ToolMessage( + content=str(e), + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + + def _handle_view( + self, args: dict, state: AnthropicToolsState, tool_call_id: str | None + ) -> Command: + """Handle view command.""" + path = args["path"] + normalized_path = _validate_path(path, allowed_prefixes=self.allowed_prefixes) + + files = cast("dict[str, Any]", state.get(self.state_key, {})) + file_data = files.get(normalized_path) + + if file_data is None: + # Try directory listing + matching = _list_directory(files, normalized_path) + + if matching: + content = "\n".join(matching) + return Command( + update={ + "messages": [ + ToolMessage( + content=content, + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + msg = f"File not found: {path}" + raise FileNotFoundError(msg) + + # Format file content with line numbers + lines_content = file_data["content"] + formatted_lines = [f"{i + 1}|{line}" for i, line in enumerate(lines_content)] + content = "\n".join(formatted_lines) + + return Command( + update={ + "messages": [ + ToolMessage( + content=content, + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + def _handle_create( + self, args: dict, state: AnthropicToolsState, tool_call_id: str | None + ) -> Command: + """Handle create command.""" + path = args["path"] + file_text = args["file_text"] + + normalized_path = _validate_path(path, allowed_prefixes=self.allowed_prefixes) + + # Get existing files + files = cast("dict[str, Any]", 
state.get(self.state_key, {})) + existing = files.get(normalized_path) + + # Create file data + now = datetime.now(timezone.utc).isoformat() + created_at = existing["created_at"] if existing else now + + content_lines = file_text.split("\n") + + return Command( + update={ + self.state_key: { + normalized_path: { + "content": content_lines, + "created_at": created_at, + "modified_at": now, + } + }, + "messages": [ + ToolMessage( + content=f"File created: {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ], + } + ) + + def _handle_str_replace( + self, args: dict, state: AnthropicToolsState, tool_call_id: str | None + ) -> Command: + """Handle str_replace command.""" + path = args["path"] + old_str = args["old_str"] + new_str = args.get("new_str", "") + + normalized_path = _validate_path(path, allowed_prefixes=self.allowed_prefixes) + + # Read file + files = cast("dict[str, Any]", state.get(self.state_key, {})) + file_data = files.get(normalized_path) + if file_data is None: + msg = f"File not found: {path}" + raise FileNotFoundError(msg) + + lines_content = file_data["content"] + content = "\n".join(lines_content) + + # Replace string + if old_str not in content: + msg = f"String not found in file: {old_str}" + raise ValueError(msg) + + new_content = content.replace(old_str, new_str, 1) + new_lines = new_content.split("\n") + + # Update file + now = datetime.now(timezone.utc).isoformat() + + return Command( + update={ + self.state_key: { + normalized_path: { + "content": new_lines, + "created_at": file_data["created_at"], + "modified_at": now, + } + }, + "messages": [ + ToolMessage( + content=f"String replaced in {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ], + } + ) + + def _handle_insert( + self, args: dict, state: AnthropicToolsState, tool_call_id: str | None + ) -> Command: + """Handle insert command.""" + path = args["path"] + insert_line = args["insert_line"] + text_to_insert = args["new_str"] + + normalized_path = _validate_path(path, allowed_prefixes=self.allowed_prefixes) + + # Read file + files = cast("dict[str, Any]", state.get(self.state_key, {})) + file_data = files.get(normalized_path) + if file_data is None: + msg = f"File not found: {path}" + raise FileNotFoundError(msg) + + lines_content = file_data["content"] + new_lines = text_to_insert.split("\n") + + # Insert after insert_line (0-indexed) + updated_lines = ( + lines_content[:insert_line] + new_lines + lines_content[insert_line:] + ) + + # Update file + now = datetime.now(timezone.utc).isoformat() + + return Command( + update={ + self.state_key: { + normalized_path: { + "content": updated_lines, + "created_at": file_data["created_at"], + "modified_at": now, + } + }, + "messages": [ + ToolMessage( + content=f"Text inserted in {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ], + } + ) + + def _handle_delete( + self, + args: dict, + state: AnthropicToolsState, + tool_call_id: str | None, + ) -> Command: + """Handle delete command.""" + path = args["path"] + + normalized_path = _validate_path(path, allowed_prefixes=self.allowed_prefixes) + + return Command( + update={ + self.state_key: {normalized_path: None}, + "messages": [ + ToolMessage( + content=f"File deleted: {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ], + } + ) + + def _handle_rename( + self, args: dict, state: AnthropicToolsState, tool_call_id: str | None + ) -> Command: + """Handle rename command.""" + old_path = args["old_path"] + new_path = args["new_path"] + + normalized_old = 
_validate_path(
+            old_path, allowed_prefixes=self.allowed_prefixes
+        )
+        normalized_new = _validate_path(
+            new_path, allowed_prefixes=self.allowed_prefixes
+        )
+
+        # Read file
+        files = cast("dict[str, Any]", state.get(self.state_key, {}))
+        file_data = files.get(normalized_old)
+        if file_data is None:
+            msg = f"File not found: {old_path}"
+            raise ValueError(msg)
+
+        # Update timestamp
+        now = datetime.now(timezone.utc).isoformat()
+        file_data_copy = file_data.copy()
+        file_data_copy["modified_at"] = now
+
+        return Command(
+            update={
+                self.state_key: {
+                    normalized_old: None,
+                    normalized_new: file_data_copy,
+                },
+                "messages": [
+                    ToolMessage(
+                        content=f"File renamed: {old_path} -> {new_path}",
+                        tool_call_id=tool_call_id,
+                        name=self.tool_name,
+                    )
+                ],
+            }
+        )
+
+
+class StateClaudeTextEditorMiddleware(_StateClaudeFileToolMiddleware):
+    """State-based text editor tool middleware.
+
+    Provides Anthropic's text_editor tool using LangGraph state for storage.
+    Files persist for the conversation thread.
+
+    Example:
+        ```python
+        from langchain.agents import create_agent
+        from langchain_anthropic.middleware import StateClaudeTextEditorMiddleware
+
+        agent = create_agent(
+            model=model,
+            tools=[],
+            middleware=[StateClaudeTextEditorMiddleware()],
+        )
+        ```
+    """
+
+    def __init__(
+        self,
+        *,
+        allowed_path_prefixes: Sequence[str] | None = None,
+    ) -> None:
+        """Initialize the text editor middleware.
+
+        Args:
+            allowed_path_prefixes: Optional list of allowed path prefixes.
+                If specified, only paths starting with these prefixes are allowed.
+        """
+        super().__init__(
+            tool_type=TEXT_EDITOR_TOOL_TYPE,
+            tool_name=TEXT_EDITOR_TOOL_NAME,
+            state_key="text_editor_files",
+            allowed_path_prefixes=allowed_path_prefixes,
+        )
+
+
+class StateClaudeMemoryMiddleware(_StateClaudeFileToolMiddleware):
+    """State-based memory tool middleware.
+
+    Provides Anthropic's memory tool using LangGraph state for storage.
+    Files persist for the conversation thread. Enforces /memories prefix
+    and injects Anthropic's recommended system prompt.
+
+    Example:
+        ```python
+        from langchain.agents import create_agent
+        from langchain_anthropic.middleware import StateClaudeMemoryMiddleware
+
+        agent = create_agent(
+            model=model,
+            tools=[],
+            middleware=[StateClaudeMemoryMiddleware()],
+        )
+        ```
+    """
+
+    def __init__(
+        self,
+        *,
+        allowed_path_prefixes: Sequence[str] | None = None,
+        system_prompt: str = MEMORY_SYSTEM_PROMPT,
+    ) -> None:
+        """Initialize the memory middleware.
+
+        Args:
+            allowed_path_prefixes: Optional list of allowed path prefixes.
+                Defaults to ["/memories"].
+            system_prompt: System prompt to inject. Defaults to Anthropic's
+                recommended memory prompt.
+        """
+        super().__init__(
+            tool_type=MEMORY_TOOL_TYPE,
+            tool_name=MEMORY_TOOL_NAME,
+            state_key="memory_files",
+            allowed_path_prefixes=allowed_path_prefixes or ["/memories"],
+            system_prompt=system_prompt,
+        )
+
+
+class _FilesystemClaudeFileToolMiddleware(AgentMiddleware):
+    """Base class for filesystem-based file tool middleware (internal)."""
+
+    def __init__(
+        self,
+        *,
+        tool_type: str,
+        tool_name: str,
+        root_path: str,
+        allowed_prefixes: list[str] | None = None,
+        max_file_size_mb: int = 10,
+        system_prompt: str | None = None,
+    ) -> None:
+        """Initialize the middleware.
+
+        Args:
+            tool_type: Tool type identifier.
+            tool_name: Tool name.
+            root_path: Root directory for file operations.
+            allowed_prefixes: Optional list of allowed virtual path prefixes.
+            max_file_size_mb: Maximum file size in MB.
+ system_prompt: Optional system prompt to inject. + """ + self.tool_type = tool_type + self.tool_name = tool_name + self.root_path = Path(root_path).resolve() + self.allowed_prefixes = allowed_prefixes or ["/"] + self.max_file_size_bytes = max_file_size_mb * 1024 * 1024 + self.system_prompt = system_prompt + + # Create root directory if it doesn't exist + self.root_path.mkdir(parents=True, exist_ok=True) + + def wrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelResponse: + """Inject tool and optional system prompt.""" + # Add tool + tools = list(request.tools or []) + tools.append( + { + "type": self.tool_type, + "name": self.tool_name, + } + ) + request.tools = tools + + # Inject system prompt if provided + if self.system_prompt: + request.system_prompt = ( + request.system_prompt + "\n\n" + self.system_prompt + if request.system_prompt + else self.system_prompt + ) + return handler(request) + + async def awrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelResponse: + """Inject tool and optional system prompt (async version).""" + # Add tool + tools = list(request.tools or []) + tools.append( + { + "type": self.tool_type, + "name": self.tool_name, + } + ) + request.tools = tools + + # Inject system prompt if provided + if self.system_prompt: + request.system_prompt = ( + request.system_prompt + "\n\n" + self.system_prompt + if request.system_prompt + else self.system_prompt + ) + + return await handler(request) + + def wrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], ToolMessage | Command], + ) -> ToolMessage | Command: + """Intercept tool calls.""" + tool_call = request.tool_call + tool_name = tool_call.get("name") + + if tool_name != self.tool_name: + return handler(request) + + # Handle tool call + try: + args = tool_call.get("args", {}) + command = args.get("command") + + if command == "view": + return self._handle_view(args, tool_call["id"]) + if command == "create": + return self._handle_create(args, tool_call["id"]) + if command == "str_replace": + return self._handle_str_replace(args, tool_call["id"]) + if command == "insert": + return self._handle_insert(args, tool_call["id"]) + if command == "delete": + return self._handle_delete(args, tool_call["id"]) + if command == "rename": + return self._handle_rename(args, tool_call["id"]) + + msg = f"Unknown command: {command}" + return ToolMessage( + content=msg, + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + except (ValueError, FileNotFoundError) as e: + return ToolMessage( + content=str(e), + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + """Intercept tool calls (async version).""" + tool_call = request.tool_call + tool_name = tool_call.get("name") + + if tool_name != self.tool_name: + return await handler(request) + + # Handle tool call + try: + args = tool_call.get("args", {}) + command = args.get("command") + + if command == "view": + return self._handle_view(args, tool_call["id"]) + if command == "create": + return self._handle_create(args, tool_call["id"]) + if command == "str_replace": + return self._handle_str_replace(args, tool_call["id"]) + if command == "insert": + return self._handle_insert(args, tool_call["id"]) + if command == 
"delete": + return self._handle_delete(args, tool_call["id"]) + if command == "rename": + return self._handle_rename(args, tool_call["id"]) + + msg = f"Unknown command: {command}" + return ToolMessage( + content=msg, + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + except (ValueError, FileNotFoundError) as e: + return ToolMessage( + content=str(e), + tool_call_id=tool_call["id"], + name=tool_name, + status="error", + ) + + def _validate_and_resolve_path(self, path: str) -> Path: + """Validate and resolve a virtual path to filesystem path. + + Args: + path: Virtual path (e.g., /file.txt or /src/main.py). + + Returns: + Resolved absolute filesystem path within root_path. + + Raises: + ValueError: If path contains traversal attempts, escapes root directory, + or violates allowed_prefixes restrictions. + """ + # Normalize path + if not path.startswith("/"): + path = "/" + path + + # Check for path traversal + if ".." in path or "~" in path: + msg = "Path traversal not allowed" + raise ValueError(msg) + + # Convert virtual path to filesystem path + # Remove leading / and resolve relative to root + relative = path.lstrip("/") + full_path = (self.root_path / relative).resolve() + + # Ensure path is within root + try: + full_path.relative_to(self.root_path) + except ValueError: + msg = f"Path outside root directory: {path}" + raise ValueError(msg) from None + + # Check allowed prefixes + virtual_path = "/" + str(full_path.relative_to(self.root_path)) + if self.allowed_prefixes: + allowed = any( + virtual_path.startswith(prefix) or virtual_path == prefix.rstrip("/") + for prefix in self.allowed_prefixes + ) + if not allowed: + msg = f"Path must start with one of: {self.allowed_prefixes}" + raise ValueError(msg) + + return full_path + + def _handle_view(self, args: dict, tool_call_id: str | None) -> Command: + """Handle view command.""" + path = args["path"] + full_path = self._validate_and_resolve_path(path) + + if not full_path.exists() or not full_path.is_file(): + msg = f"File not found: {path}" + raise FileNotFoundError(msg) + + # Check file size + if full_path.stat().st_size > self.max_file_size_bytes: + max_mb = self.max_file_size_bytes / 1024 / 1024 + msg = f"File too large: {path} exceeds {max_mb}MB" + raise ValueError(msg) + + # Read file + try: + content = full_path.read_text() + except UnicodeDecodeError as e: + msg = f"Cannot decode file {path}: {e}" + raise ValueError(msg) from e + + # Format with line numbers + lines = content.split("\n") + # Remove trailing newline's empty string if present + if lines and lines[-1] == "": + lines = lines[:-1] + formatted_lines = [f"{i + 1}|{line}" for i, line in enumerate(lines)] + formatted_content = "\n".join(formatted_lines) + + return Command( + update={ + "messages": [ + ToolMessage( + content=formatted_content, + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + def _handle_create(self, args: dict, tool_call_id: str | None) -> Command: + """Handle create command.""" + path = args["path"] + file_text = args["file_text"] + + full_path = self._validate_and_resolve_path(path) + + # Create parent directories + full_path.parent.mkdir(parents=True, exist_ok=True) + + # Write file + full_path.write_text(file_text + "\n") + + return Command( + update={ + "messages": [ + ToolMessage( + content=f"File created: {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + def _handle_str_replace(self, args: dict, tool_call_id: str | None) -> Command: + """Handle str_replace command.""" + path 
= args["path"] + old_str = args["old_str"] + new_str = args.get("new_str", "") + + full_path = self._validate_and_resolve_path(path) + + if not full_path.exists(): + msg = f"File not found: {path}" + raise FileNotFoundError(msg) + + # Read file + content = full_path.read_text() + + # Replace string + if old_str not in content: + msg = f"String not found in file: {old_str}" + raise ValueError(msg) + + new_content = content.replace(old_str, new_str, 1) + + # Write back + full_path.write_text(new_content) + + return Command( + update={ + "messages": [ + ToolMessage( + content=f"String replaced in {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + def _handle_insert(self, args: dict, tool_call_id: str | None) -> Command: + """Handle insert command.""" + path = args["path"] + insert_line = args["insert_line"] + text_to_insert = args["new_str"] + + full_path = self._validate_and_resolve_path(path) + + if not full_path.exists(): + msg = f"File not found: {path}" + raise FileNotFoundError(msg) + + # Read file + content = full_path.read_text() + lines = content.split("\n") + # Handle trailing newline + if lines and lines[-1] == "": + lines = lines[:-1] + had_trailing_newline = True + else: + had_trailing_newline = False + + new_lines = text_to_insert.split("\n") + + # Insert after insert_line (0-indexed) + updated_lines = lines[:insert_line] + new_lines + lines[insert_line:] + + # Write back + new_content = "\n".join(updated_lines) + if had_trailing_newline: + new_content += "\n" + full_path.write_text(new_content) + + return Command( + update={ + "messages": [ + ToolMessage( + content=f"Text inserted in {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + def _handle_delete(self, args: dict, tool_call_id: str | None) -> Command: + """Handle delete command.""" + path = args["path"] + full_path = self._validate_and_resolve_path(path) + + if full_path.is_file(): + full_path.unlink() + elif full_path.is_dir(): + shutil.rmtree(full_path) + # If doesn't exist, silently succeed + + return Command( + update={ + "messages": [ + ToolMessage( + content=f"File deleted: {path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + def _handle_rename(self, args: dict, tool_call_id: str | None) -> Command: + """Handle rename command.""" + old_path = args["old_path"] + new_path = args["new_path"] + + old_full = self._validate_and_resolve_path(old_path) + new_full = self._validate_and_resolve_path(new_path) + + if not old_full.exists(): + msg = f"File not found: {old_path}" + raise ValueError(msg) + + # Create parent directory for new path + new_full.parent.mkdir(parents=True, exist_ok=True) + + # Rename + old_full.rename(new_full) + + return Command( + update={ + "messages": [ + ToolMessage( + content=f"File renamed: {old_path} -> {new_path}", + tool_call_id=tool_call_id, + name=self.tool_name, + ) + ] + } + ) + + +class FilesystemClaudeTextEditorMiddleware(_FilesystemClaudeFileToolMiddleware): + """Filesystem-based text editor tool middleware. + + Provides Anthropic's text_editor tool using local filesystem for storage. + User handles persistence via volumes, git, or other mechanisms. 
+
+    Example:
+        ```python
+        from langchain.agents import create_agent
+        from langchain_anthropic.middleware import FilesystemClaudeTextEditorMiddleware
+
+        agent = create_agent(
+            model=model,
+            tools=[],
+            middleware=[FilesystemClaudeTextEditorMiddleware(root_path="/workspace")],
+        )
+        ```
+    """
+
+    def __init__(
+        self,
+        *,
+        root_path: str,
+        allowed_prefixes: list[str] | None = None,
+        max_file_size_mb: int = 10,
+    ) -> None:
+        """Initialize the text editor middleware.
+
+        Args:
+            root_path: Root directory for file operations.
+            allowed_prefixes: Optional list of allowed virtual path prefixes
+                (default: ["/"]).
+            max_file_size_mb: Maximum file size in MB (default: 10).
+        """
+        super().__init__(
+            tool_type=TEXT_EDITOR_TOOL_TYPE,
+            tool_name=TEXT_EDITOR_TOOL_NAME,
+            root_path=root_path,
+            allowed_prefixes=allowed_prefixes,
+            max_file_size_mb=max_file_size_mb,
+        )
+
+
+class FilesystemClaudeMemoryMiddleware(_FilesystemClaudeFileToolMiddleware):
+    """Filesystem-based memory tool middleware.
+
+    Provides Anthropic's memory tool using local filesystem for storage.
+    User handles persistence via volumes, git, or other mechanisms.
+    Enforces /memories prefix and injects Anthropic's recommended system prompt.
+
+    Example:
+        ```python
+        from langchain.agents import create_agent
+        from langchain_anthropic.middleware import FilesystemClaudeMemoryMiddleware
+
+        agent = create_agent(
+            model=model,
+            tools=[],
+            middleware=[FilesystemClaudeMemoryMiddleware(root_path="/workspace")],
+        )
+        ```
+    """
+
+    def __init__(
+        self,
+        *,
+        root_path: str,
+        allowed_prefixes: list[str] | None = None,
+        max_file_size_mb: int = 10,
+        system_prompt: str = MEMORY_SYSTEM_PROMPT,
+    ) -> None:
+        """Initialize the memory middleware.
+
+        Args:
+            root_path: Root directory for file operations.
+            allowed_prefixes: Optional list of allowed virtual path prefixes.
+                Defaults to ["/memories"].
+            max_file_size_mb: Maximum file size in MB (default: 10).
+            system_prompt: System prompt to inject. Defaults to Anthropic's
+                recommended memory prompt.
+ """ + super().__init__( + tool_type=MEMORY_TOOL_TYPE, + tool_name=MEMORY_TOOL_NAME, + root_path=root_path, + allowed_prefixes=allowed_prefixes or ["/memories"], + max_file_size_mb=max_file_size_mb, + system_prompt=system_prompt, + ) + + +__all__ = [ + "AnthropicToolsState", + "FileData", + "FilesystemClaudeMemoryMiddleware", + "FilesystemClaudeTextEditorMiddleware", + "StateClaudeMemoryMiddleware", + "StateClaudeTextEditorMiddleware", +] diff --git a/libs/partners/anthropic/langchain_anthropic/middleware/bash.py b/libs/partners/anthropic/langchain_anthropic/middleware/bash.py new file mode 100644 index 00000000000..61184b1a037 --- /dev/null +++ b/libs/partners/anthropic/langchain_anthropic/middleware/bash.py @@ -0,0 +1,95 @@ +"""Anthropic-specific middleware for the Claude bash tool.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from typing import Any, Literal + +from langchain.agents.middleware.shell_tool import ShellToolMiddleware +from langchain.agents.middleware.types import ( + ModelRequest, + ModelResponse, + ToolCallRequest, +) +from langchain_core.messages import ToolMessage +from langgraph.types import Command + +_CLAUDE_BASH_DESCRIPTOR = {"type": "bash_20250124", "name": "bash"} + + +class ClaudeBashToolMiddleware(ShellToolMiddleware): + """Middleware that exposes Anthropic's native bash tool to models.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Initialize middleware without registering a client-side tool.""" + kwargs["shell_command"] = ("/bin/bash",) + super().__init__(*args, **kwargs) + # Remove the base tool so Claude's native descriptor is the sole entry. + self._tool = None # type: ignore[assignment] + self.tools = [] + + def wrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelResponse: + """Ensure the Claude bash descriptor is available to the model.""" + tools = request.tools + if all(tool is not _CLAUDE_BASH_DESCRIPTOR for tool in tools): + tools = [*tools, _CLAUDE_BASH_DESCRIPTOR] + request = request.override(tools=tools) + return handler(request) + + def wrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Command | ToolMessage], + ) -> Command | ToolMessage: + """Intercept Claude bash tool calls and execute them locally.""" + tool_call = request.tool_call + if tool_call.get("name") != "bash": + return handler(request) + resources = self._ensure_resources(request.state) + return self._run_shell_tool( + resources, + tool_call["args"], + tool_call_id=tool_call.get("id"), + ) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[Command | ToolMessage]], + ) -> Command | ToolMessage: + """Async interception mirroring the synchronous implementation.""" + tool_call = request.tool_call + if tool_call.get("name") != "bash": + return await handler(request) + resources = self._ensure_resources(request.state) + return self._run_shell_tool( + resources, + tool_call["args"], + tool_call_id=tool_call.get("id"), + ) + + def _format_tool_message( + self, + content: str, + tool_call_id: str | None, + *, + status: Literal["success", "error"], + artifact: dict[str, Any] | None = None, + ) -> ToolMessage | str: + """Format tool responses using Claude's bash descriptor.""" + if tool_call_id is None: + return content + return ToolMessage( + content=content, + tool_call_id=tool_call_id, + name=_CLAUDE_BASH_DESCRIPTOR["name"], + status=status, + artifact=artifact 
or {},
+        )
+
+
+__all__ = ["ClaudeBashToolMiddleware"]
diff --git a/libs/partners/anthropic/langchain_anthropic/middleware/file_search.py b/libs/partners/anthropic/langchain_anthropic/middleware/file_search.py
new file mode 100644
index 00000000000..977a19727be
--- /dev/null
+++ b/libs/partners/anthropic/langchain_anthropic/middleware/file_search.py
@@ -0,0 +1,294 @@
+"""File search middleware for Anthropic text editor and memory tools.
+
+This module provides Glob and Grep search tools that operate on files stored
+in state or filesystem.
+"""
+
+from __future__ import annotations
+
+import fnmatch
+import re
+from pathlib import Path, PurePosixPath
+from typing import TYPE_CHECKING, Annotated, Literal, cast
+
+if TYPE_CHECKING:
+    from typing import Any
+
+from langchain.agents.middleware.types import AgentMiddleware
+from langchain_core.tools import InjectedToolArg, tool
+
+from langchain_anthropic.middleware.anthropic_tools import AnthropicToolsState
+
+
+def _expand_include_patterns(pattern: str) -> list[str] | None:
+    """Expand brace patterns like ``*.{py,pyi}`` into a list of globs."""
+    if "}" in pattern and "{" not in pattern:
+        return None
+
+    expanded: list[str] = []
+
+    def _expand(current: str) -> None:
+        start = current.find("{")
+        if start == -1:
+            expanded.append(current)
+            return
+
+        end = current.find("}", start)
+        if end == -1:
+            raise ValueError
+
+        prefix = current[:start]
+        suffix = current[end + 1 :]
+        inner = current[start + 1 : end]
+        if not inner:
+            raise ValueError
+
+        for option in inner.split(","):
+            _expand(prefix + option + suffix)
+
+    try:
+        _expand(pattern)
+    except ValueError:
+        return None
+
+    return expanded
+
+
+def _is_valid_include_pattern(pattern: str) -> bool:
+    """Validate glob pattern used for include filters."""
+    if not pattern:
+        return False
+
+    if any(char in pattern for char in ("\x00", "\n", "\r")):
+        return False
+
+    expanded = _expand_include_patterns(pattern)
+    if expanded is None:
+        return False
+
+    try:
+        for candidate in expanded:
+            re.compile(fnmatch.translate(candidate))
+    except re.error:
+        return False
+
+    return True
+
+
+def _match_include_pattern(basename: str, pattern: str) -> bool:
+    """Return True if the basename matches the include pattern."""
+    expanded = _expand_include_patterns(pattern)
+    if not expanded:
+        return False
+
+    return any(fnmatch.fnmatch(basename, candidate) for candidate in expanded)
+
+
+class StateFileSearchMiddleware(AgentMiddleware):
+    """Provides Glob and Grep search over state-based files.
+
+    This middleware adds two tools that search through virtual files in state:
+    - Glob: Fast file pattern matching by file path
+    - Grep: Fast content search using regular expressions
+
+    Example:
+        ```python
+        from langchain.agents import create_agent
+        from langchain_anthropic.middleware.anthropic_tools import (
+            StateClaudeTextEditorMiddleware,
+        )
+        from langchain_anthropic.middleware.file_search import StateFileSearchMiddleware
+
+        agent = create_agent(
+            model=model,
+            tools=[],
+            middleware=[
+                StateClaudeTextEditorMiddleware(),
+                StateFileSearchMiddleware(),
+            ],
+        )
+        ```
+    """
+
+    state_schema = AnthropicToolsState
+
+    def __init__(
+        self,
+        *,
+        state_key: str = "text_editor_files",
+    ) -> None:
+        """Initialize the search middleware.
+
+        Args:
+            state_key: State key to search (default: "text_editor_files").
+                Use "memory_files" to search memory tool files.
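+
+        Example:
+            A sketch of pointing the search tools at memory-tool files instead
+            of the default text-editor files:
+
+            ```python
+            StateFileSearchMiddleware(state_key="memory_files")
+            ```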
+ """ + self.state_key = state_key + + # Create tool instances + @tool + def glob_search( # noqa: D417 + pattern: str, + path: str = "/", + state: Annotated[AnthropicToolsState, InjectedToolArg] = None, # type: ignore[assignment] + ) -> str: + """Fast file pattern matching tool that works with any codebase size. + + Supports glob patterns like **/*.js or src/**/*.ts. + Returns matching file paths sorted by modification time. + Use this tool when you need to find files by name patterns. + + Args: + pattern: The glob pattern to match files against. + path: The directory to search in. If not specified, searches from root. + + Returns: + Newline-separated list of matching file paths, sorted by modification + time (most recently modified first). Returns "No files found" if no + matches. + """ + # Normalize base path + base_path = path if path.startswith("/") else "/" + path + + # Get files from state + files = cast("dict[str, Any]", state.get(self.state_key, {})) + + # Match files + matches = [] + for file_path, file_data in files.items(): + if file_path.startswith(base_path): + # Get relative path from base + if base_path == "/": + relative = file_path[1:] # Remove leading / + elif file_path == base_path: + relative = Path(file_path).name + elif file_path.startswith(base_path + "/"): + relative = file_path[len(base_path) + 1 :] + else: + continue + + # Match against pattern + # Handle ** pattern which requires special care + # PurePosixPath.match doesn't match single-level paths + # against **/pattern + is_match = PurePosixPath(relative).match(pattern) + if not is_match and pattern.startswith("**/"): + # Also try matching without the **/ prefix for files in base dir + is_match = PurePosixPath(relative).match(pattern[3:]) + + if is_match: + matches.append((file_path, file_data["modified_at"])) + + if not matches: + return "No files found" + + # Sort by modification time + matches.sort(key=lambda x: x[1], reverse=True) + file_paths = [path for path, _ in matches] + + return "\n".join(file_paths) + + @tool + def grep_search( # noqa: D417 + pattern: str, + path: str = "/", + include: str | None = None, + output_mode: Literal[ + "files_with_matches", "content", "count" + ] = "files_with_matches", + state: Annotated[AnthropicToolsState, InjectedToolArg] = None, # type: ignore[assignment] + ) -> str: + """Fast content search tool that works with any codebase size. + + Searches file contents using regular expressions. Supports full regex + syntax and filters files by pattern with the include parameter. + + Args: + pattern: The regular expression pattern to search for in file contents. + path: The directory to search in. If not specified, searches from root. + include: File pattern to filter (e.g., "*.js", "*.{ts,tsx}"). + output_mode: Output format: + - "files_with_matches": Only file paths containing matches (default) + - "content": Matching lines with file:line:content format + - "count": Count of matches per file + + Returns: + Search results formatted according to output_mode. Returns "No matches + found" if no results. 
+ """ + # Normalize base path + base_path = path if path.startswith("/") else "/" + path + + # Compile regex pattern (for validation) + try: + regex = re.compile(pattern) + except re.error as e: + return f"Invalid regex pattern: {e}" + + if include and not _is_valid_include_pattern(include): + return "Invalid include pattern" + + # Search files + files = cast("dict[str, Any]", state.get(self.state_key, {})) + results: dict[str, list[tuple[int, str]]] = {} + + for file_path, file_data in files.items(): + if not file_path.startswith(base_path): + continue + + # Check include filter + if include: + basename = Path(file_path).name + if not _match_include_pattern(basename, include): + continue + + # Search file content + for line_num, line in enumerate(file_data["content"], 1): + if regex.search(line): + if file_path not in results: + results[file_path] = [] + results[file_path].append((line_num, line)) + + if not results: + return "No matches found" + + # Format output based on mode + return self._format_grep_results(results, output_mode) + + self.glob_search = glob_search + self.grep_search = grep_search + self.tools = [glob_search, grep_search] + + def _format_grep_results( + self, + results: dict[str, list[tuple[int, str]]], + output_mode: str, + ) -> str: + """Format grep results based on output mode.""" + if output_mode == "files_with_matches": + # Just return file paths + return "\n".join(sorted(results.keys())) + + if output_mode == "content": + # Return file:line:content format + lines = [] + for file_path in sorted(results.keys()): + for line_num, line in results[file_path]: + lines.append(f"{file_path}:{line_num}:{line}") + return "\n".join(lines) + + if output_mode == "count": + # Return file:count format + lines = [] + for file_path in sorted(results.keys()): + count = len(results[file_path]) + lines.append(f"{file_path}:{count}") + return "\n".join(lines) + + # Default to files_with_matches + return "\n".join(sorted(results.keys())) + + +__all__ = [ + "StateFileSearchMiddleware", +] diff --git a/libs/partners/anthropic/langchain_anthropic/middleware/prompt_caching.py b/libs/partners/anthropic/langchain_anthropic/middleware/prompt_caching.py new file mode 100644 index 00000000000..77b4dc9d85c --- /dev/null +++ b/libs/partners/anthropic/langchain_anthropic/middleware/prompt_caching.py @@ -0,0 +1,143 @@ +"""Anthropic prompt caching middleware. + +Requires: + - langchain: For agent middleware framework + - langchain-anthropic: For ChatAnthropic model (already a dependency) +""" + +from collections.abc import Awaitable, Callable +from typing import Literal +from warnings import warn + +from langchain_anthropic.chat_models import ChatAnthropic + +try: + from langchain.agents.middleware.types import ( + AgentMiddleware, + ModelCallResult, + ModelRequest, + ModelResponse, + ) +except ImportError as e: + msg = ( + "AnthropicPromptCachingMiddleware requires 'langchain' to be installed. " + "This middleware is designed for use with LangChain agents. " + "Install it with: pip install langchain" + ) + raise ImportError(msg) from e + + +class AnthropicPromptCachingMiddleware(AgentMiddleware): + """Prompt Caching Middleware. + + Optimizes API usage by caching conversation prefixes for Anthropic models. + + Requires both 'langchain' and 'langchain-anthropic' packages to be installed. + + Learn more about Anthropic prompt caching + [here](https://docs.claude.com/en/docs/build-with-claude/prompt-caching). 
+ """ + + def __init__( + self, + type: Literal["ephemeral"] = "ephemeral", # noqa: A002 + ttl: Literal["5m", "1h"] = "5m", + min_messages_to_cache: int = 0, + unsupported_model_behavior: Literal["ignore", "warn", "raise"] = "warn", + ) -> None: + """Initialize the middleware with cache control settings. + + Args: + type: The type of cache to use, only "ephemeral" is supported. + ttl: The time to live for the cache, only "5m" and "1h" are + supported. + min_messages_to_cache: The minimum number of messages until the + cache is used, default is 0. + unsupported_model_behavior: The behavior to take when an + unsupported model is used. "ignore" will ignore the unsupported + model and continue without caching. "warn" will warn the user + and continue without caching. "raise" will raise an error and + stop the agent. + """ + self.type = type + self.ttl = ttl + self.min_messages_to_cache = min_messages_to_cache + self.unsupported_model_behavior = unsupported_model_behavior + + def _should_apply_caching(self, request: ModelRequest) -> bool: + """Check if caching should be applied to the request. + + Args: + request: The model request to check. + + Returns: + True if caching should be applied, False otherwise. + + Raises: + ValueError: If model is unsupported and behavior is set to "raise". + """ + if not isinstance(request.model, ChatAnthropic): + msg = ( + "AnthropicPromptCachingMiddleware caching middleware only supports " + f"Anthropic models, not instances of {type(request.model)}" + ) + if self.unsupported_model_behavior == "raise": + raise ValueError(msg) + if self.unsupported_model_behavior == "warn": + warn(msg, stacklevel=3) + return False + + messages_count = ( + len(request.messages) + 1 + if request.system_prompt + else len(request.messages) + ) + return messages_count >= self.min_messages_to_cache + + def _apply_cache_control(self, request: ModelRequest) -> None: + """Apply cache control settings to the request. + + Args: + request: The model request to modify. + """ + request.model_settings["cache_control"] = {"type": self.type, "ttl": self.ttl} + + def wrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], ModelResponse], + ) -> ModelCallResult: + """Modify the model request to add cache control blocks. + + Args: + request: The model request to potentially modify. + handler: The handler to execute the model request. + + Returns: + The model response from the handler. + """ + if not self._should_apply_caching(request): + return handler(request) + + self._apply_cache_control(request) + return handler(request) + + async def awrap_model_call( + self, + request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]], + ) -> ModelCallResult: + """Modify the model request to add cache control blocks (async version). + + Args: + request: The model request to potentially modify. + handler: The async handler to execute the model request. + + Returns: + The model response from the handler. 
+ """ + if not self._should_apply_caching(request): + return await handler(request) + + self._apply_cache_control(request) + return await handler(request) diff --git a/libs/partners/anthropic/langchain_anthropic/output_parsers.py b/libs/partners/anthropic/langchain_anthropic/output_parsers.py index 20e60fb64cb..57e2121c5e5 100644 --- a/libs/partners/anthropic/langchain_anthropic/output_parsers.py +++ b/libs/partners/anthropic/langchain_anthropic/output_parsers.py @@ -29,7 +29,7 @@ class ToolsOutputParser(BaseGenerationOutputParser): """Parse a list of candidate model Generations into a specific format. Args: - result: A list of Generations to be parsed. The Generations are assumed + result: A list of `Generation` to be parsed. The Generations are assumed to be different candidate outputs for a single model input. partial: (Not used) Whether the result is a partial result. If `True`, the parser may return a partial result, which may not be complete or valid. diff --git a/libs/partners/anthropic/pyproject.toml b/libs/partners/anthropic/pyproject.toml index 7eb22a60be2..283a100679b 100644 --- a/libs/partners/anthropic/pyproject.toml +++ b/libs/partners/anthropic/pyproject.toml @@ -3,27 +3,28 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-anthropic" +description = "Integration package connecting Claude (Anthropic) APIs and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.1" requires-python = ">=3.10.0,<4.0.0" dependencies = [ "anthropic>=0.69.0,<1.0.0", - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "pydantic>=2.7.4,<3.0.0", ] -name = "langchain-anthropic" -version = "1.0.0a3" -description = "Integration package connecting Claude (Anthropic) APIs and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/anthropic" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/anthropic" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-anthropic%22" -docs = "https://reference.langchain.com/python/integrations/langchain_anthropic/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/anthropic" +Documentation = "https://reference.langchain.com/python/integrations/langchain_anthropic/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/anthropic" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-anthropic%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ @@ -37,9 +38,12 @@ test = [ "pytest-retry>=1.7.0,<1.8.0", "pytest-timeout>=2.3.1,<3.0.0", "pytest-socket>=0.7.0,<1.0.0", + "pytest-xdist>=3.8.0,<4.0.0", "vcrpy>=7.0.0,<8.0.0", + "langgraph-prebuilt>=0.7.0a2", # set explicitly until we have a stable version "langchain-core", "langchain-tests", + "langchain", ] lint = ["ruff>=0.13.1,<0.14.0"] dev = ["langchain-core"] @@ -54,6 +58,7 @@ typing = [ [tool.uv.sources] langchain-core = { path = "../../core", editable = true } langchain-tests = { path = "../../standard-tests", editable = true } +langchain = { path = "../../langchain_v1", editable = true } [tool.mypy] disallow_untyped_defs = "True" diff --git 
a/libs/partners/anthropic/tests/cassettes/TestAnthropicStandard.test_stream_time.yaml.gz b/libs/partners/anthropic/tests/cassettes/TestAnthropicStandard.test_stream_time.yaml.gz index 1b9b181b85f..c1181a71382 100644 Binary files a/libs/partners/anthropic/tests/cassettes/TestAnthropicStandard.test_stream_time.yaml.gz and b/libs/partners/anthropic/tests/cassettes/TestAnthropicStandard.test_stream_time.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_agent_loop.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_agent_loop.yaml.gz index d53dffb02da..5bba3864cd2 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_agent_loop.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_agent_loop.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_agent_loop_streaming.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_agent_loop_streaming.yaml.gz index 8e76e86628f..7dc728333be 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_agent_loop_streaming.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_agent_loop_streaming.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_citations.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_citations.yaml.gz index c704ad451e3..dd957413dac 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_citations.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_citations.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_code_execution.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_code_execution.yaml.gz deleted file mode 100644 index f022211c387..00000000000 Binary files a/libs/partners/anthropic/tests/cassettes/test_code_execution.yaml.gz and /dev/null differ diff --git a/libs/partners/anthropic/tests/cassettes/test_code_execution[v0].yaml.gz b/libs/partners/anthropic/tests/cassettes/test_code_execution[v0].yaml.gz index b06c59fd788..f3e5eab0682 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_code_execution[v0].yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_code_execution[v0].yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_code_execution[v1].yaml.gz b/libs/partners/anthropic/tests/cassettes/test_code_execution[v1].yaml.gz index 3edf147f8c9..56bf786f3d7 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_code_execution[v1].yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_code_execution[v1].yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_context_management.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_context_management.yaml.gz index 096e6a0dd53..620cc31be4d 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_context_management.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_context_management.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_redacted_thinking.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_redacted_thinking.yaml.gz index f4295b91e62..8a8ef497058 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_redacted_thinking.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_redacted_thinking.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_remote_mcp.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_remote_mcp.yaml.gz index c082b738dc8..7bc6b916593 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_remote_mcp.yaml.gz and 
b/libs/partners/anthropic/tests/cassettes/test_remote_mcp.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_search_result_tool_message.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_search_result_tool_message.yaml.gz new file mode 100644 index 00000000000..4f80f569039 Binary files /dev/null and b/libs/partners/anthropic/tests/cassettes/test_search_result_tool_message.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_thinking.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_thinking.yaml.gz index 5c92ad877ce..3da136f002d 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_thinking.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_thinking.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_web_fetch.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_web_fetch.yaml.gz index 56db91c639c..a9a0a51b9b5 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_web_fetch.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_web_fetch.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_web_fetch_v1.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_web_fetch_v1.yaml.gz index 349d79944f5..a2a4679c15f 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_web_fetch_v1.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_web_fetch_v1.yaml.gz differ diff --git a/libs/partners/anthropic/tests/cassettes/test_web_search.yaml.gz b/libs/partners/anthropic/tests/cassettes/test_web_search.yaml.gz index 6802f280403..e8e4dea5bff 100644 Binary files a/libs/partners/anthropic/tests/cassettes/test_web_search.yaml.gz and b/libs/partners/anthropic/tests/cassettes/test_web_search.yaml.gz differ diff --git a/libs/partners/anthropic/tests/integration_tests/test_chat_models.py b/libs/partners/anthropic/tests/integration_tests/test_chat_models.py index fc2f9f3cf8a..f8f5111da39 100644 --- a/libs/partners/anthropic/tests/integration_tests/test_chat_models.py +++ b/libs/partners/anthropic/tests/integration_tests/test_chat_models.py @@ -32,8 +32,7 @@ from langchain_anthropic import ChatAnthropic from langchain_anthropic._compat import _convert_from_v1_to_anthropic from tests.unit_tests._utils import FakeCallbackHandler -MODEL_NAME = "claude-3-5-haiku-latest" -IMAGE_MODEL_NAME = "claude-3-5-haiku-latest" +MODEL_NAME = "claude-3-5-haiku-20241022" def test_stream() -> None: @@ -162,7 +161,7 @@ async def test_stream_usage_override() -> None: async def test_abatch() -> None: - """Test streaming tokens from ChatAnthropic.""" + """Test streaming tokens.""" llm = ChatAnthropic(model_name=MODEL_NAME) # type: ignore[call-arg, call-arg] result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"]) @@ -171,7 +170,7 @@ async def test_abatch() -> None: async def test_abatch_tags() -> None: - """Test batch tokens from ChatAnthropic.""" + """Test batch tokens.""" llm = ChatAnthropic(model_name=MODEL_NAME) # type: ignore[call-arg, call-arg] result = await llm.abatch( @@ -232,7 +231,7 @@ async def test_async_tool_use() -> None: def test_batch() -> None: - """Test batch tokens from ChatAnthropic.""" + """Test batch tokens.""" llm = ChatAnthropic(model_name=MODEL_NAME) # type: ignore[call-arg, call-arg] result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"]) @@ -241,7 +240,7 @@ def test_batch() -> None: async def test_ainvoke() -> None: - """Test invoke tokens from ChatAnthropic.""" + """Test invoke tokens.""" llm = ChatAnthropic(model_name=MODEL_NAME) # type: 
ignore[call-arg, call-arg] result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]}) @@ -250,7 +249,7 @@ async def test_ainvoke() -> None: def test_invoke() -> None: - """Test invoke tokens from ChatAnthropic.""" + """Test invoke tokens.""" llm = ChatAnthropic(model_name=MODEL_NAME) # type: ignore[call-arg, call-arg] result = llm.invoke("I'm Pickle Rick", config={"tags": ["foo"]}) @@ -396,7 +395,7 @@ async def test_anthropic_async_streaming_callback() -> None: def test_anthropic_multimodal() -> None: """Test that multimodal inputs are handled correctly.""" - chat = ChatAnthropic(model=IMAGE_MODEL_NAME) # type: ignore[call-arg] + chat = ChatAnthropic(model=MODEL_NAME) # type: ignore[call-arg] messages: list[BaseMessage] = [ HumanMessage( content=[ @@ -534,7 +533,7 @@ def test_tool_use() -> None: assert content_blocks[1]["args"] # Testing token-efficient tools - # https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/token-efficient-tool-use + # https://docs.claude.com/en/docs/agents-and-tools/tool-use/token-efficient-tool-use assert gathered.usage_metadata assert response.usage_metadata assert ( @@ -562,9 +561,9 @@ def test_tool_use() -> None: assert len(chunks) > 1 -def test_builtin_tools() -> None: - llm = ChatAnthropic(model="claude-3-7-sonnet-20250219") # type: ignore[call-arg] - tool = {"type": "text_editor_20250124", "name": "str_replace_editor"} +def test_builtin_tools_text_editor() -> None: + llm = ChatAnthropic(model="claude-sonnet-4-5-20250929") # type: ignore[call-arg] + tool = {"type": "text_editor_20250728", "name": "str_replace_based_edit_tool"} llm_with_tools = llm.bind_tools([tool]) response = llm_with_tools.invoke( "There's a syntax error in my primes.py file. Can you help me fix it?", @@ -577,7 +576,7 @@ def test_builtin_tools() -> None: assert content_blocks[0]["type"] == "text" assert content_blocks[0]["text"] assert content_blocks[1]["type"] == "tool_call" - assert content_blocks[1]["name"] == "str_replace_editor" + assert content_blocks[1]["name"] == "str_replace_based_edit_tool" class GenerateUsername(BaseModel): @@ -588,7 +587,7 @@ class GenerateUsername(BaseModel): def test_disable_parallel_tool_calling() -> None: - llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") # type: ignore[call-arg] + llm = ChatAnthropic(model=MODEL_NAME) # type: ignore[call-arg] llm_with_tools = llm.bind_tools([GenerateUsername], parallel_tool_calls=False) result = llm_with_tools.invoke( "Use the GenerateUsername tool to generate user names for:\n\n" @@ -607,7 +606,7 @@ def test_anthropic_with_empty_text_block() -> None: """Type the given letter.""" return "OK" - model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0).bind_tools( # type: ignore[call-arg] + model = ChatAnthropic(model=MODEL_NAME, temperature=0).bind_tools( # type: ignore[call-arg] [type_letter], ) @@ -646,7 +645,7 @@ def test_anthropic_with_empty_text_block() -> None: def test_with_structured_output() -> None: llm = ChatAnthropic( - model="claude-3-opus-20240229", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] ) structured_llm = llm.with_structured_output( @@ -665,7 +664,7 @@ def test_with_structured_output() -> None: def test_get_num_tokens_from_messages() -> None: - llm = ChatAnthropic(model="claude-3-5-sonnet-20241022") # type: ignore[call-arg] + llm = ChatAnthropic(model=MODEL_NAME) # type: ignore[call-arg] # Test simple case messages = [ @@ -739,7 +738,7 @@ def test_pdf_document_input() -> None: url = 
"https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf" data = b64encode(requests.get(url, timeout=10).content).decode() - result = ChatAnthropic(model=IMAGE_MODEL_NAME).invoke( # type: ignore[call-arg] + result = ChatAnthropic(model=MODEL_NAME).invoke( # type: ignore[call-arg] [ HumanMessage( [ @@ -770,7 +769,7 @@ def test_agent_loop(output_version: Literal["v0", "v1"]) -> None: """Get the weather for a location.""" return "It's sunny." - llm = ChatAnthropic(model="claude-3-5-haiku-latest", output_version=output_version) # type: ignore[call-arg] + llm = ChatAnthropic(model=MODEL_NAME, output_version=output_version) # type: ignore[call-arg] llm_with_tools = llm.bind_tools([get_weather]) input_message = HumanMessage("What is the weather in San Francisco, CA?") tool_call_message = llm_with_tools.invoke([input_message]) @@ -800,7 +799,7 @@ def test_agent_loop_streaming(output_version: Literal["v0", "v1"]) -> None: return "It's sunny." llm = ChatAnthropic( - model="claude-3-5-haiku-latest", + model=MODEL_NAME, streaming=True, output_version=output_version, # type: ignore[call-arg] ) @@ -828,7 +827,7 @@ def test_agent_loop_streaming(output_version: Literal["v0", "v1"]) -> None: @pytest.mark.vcr @pytest.mark.parametrize("output_version", ["v0", "v1"]) def test_citations(output_version: Literal["v0", "v1"]) -> None: - llm = ChatAnthropic(model="claude-3-5-haiku-latest", output_version=output_version) # type: ignore[call-arg] + llm = ChatAnthropic(model=MODEL_NAME, output_version=output_version) # type: ignore[call-arg] messages = [ { "role": "user", @@ -879,7 +878,7 @@ def test_citations(output_version: Literal["v0", "v1"]) -> None: @pytest.mark.vcr def test_thinking() -> None: llm = ChatAnthropic( - model="claude-3-7-sonnet-latest", # type: ignore[call-arg] + model="claude-sonnet-4-5-20250929", # type: ignore[call-arg] max_tokens=5_000, # type: ignore[call-arg] thinking={"type": "enabled", "budget_tokens": 2_000}, ) @@ -921,7 +920,7 @@ def test_thinking() -> None: @pytest.mark.vcr def test_thinking_v1() -> None: llm = ChatAnthropic( - model="claude-3-7-sonnet-latest", # type: ignore[call-arg] + model="claude-sonnet-4-5-20250929", # type: ignore[call-arg] max_tokens=5_000, # type: ignore[call-arg] thinking={"type": "enabled", "budget_tokens": 2_000}, output_version="v1", @@ -967,6 +966,8 @@ def test_thinking_v1() -> None: @pytest.mark.parametrize("output_version", ["v0", "v1"]) def test_redacted_thinking(output_version: Literal["v0", "v1"]) -> None: llm = ChatAnthropic( + # It appears that Sonnet 4.5 either: isn't returning redacted thinking blocks, + # or the magic string is broken? Retry later once 3-7 finally removed model="claude-3-7-sonnet-latest", # type: ignore[call-arg] max_tokens=5_000, # type: ignore[call-arg] thinking={"type": "enabled", "budget_tokens": 2_000}, @@ -1029,7 +1030,7 @@ def test_redacted_thinking(output_version: Literal["v0", "v1"]) -> None: def test_structured_output_thinking_enabled() -> None: llm = ChatAnthropic( - model="claude-3-7-sonnet-latest", # type: ignore[call-arg] + model="claude-sonnet-4-5-20250929", # type: ignore[call-arg] max_tokens=5_000, # type: ignore[call-arg] thinking={"type": "enabled", "budget_tokens": 2_000}, ) @@ -1052,7 +1053,7 @@ def test_structured_output_thinking_force_tool_use() -> None: # when `thinking` is enabled. When this test fails, it means that the feature # is supported and the workarounds in `with_structured_output` should be removed. 
llm = ChatAnthropic( - model="claude-3-7-sonnet-latest", # type: ignore[call-arg] + model="claude-sonnet-4-5-20250929", # type: ignore[call-arg] max_tokens=5_000, # type: ignore[call-arg] thinking={"type": "enabled", "budget_tokens": 2_000}, ).bind_tools( @@ -1077,14 +1078,14 @@ def test_image_tool_calling() -> None: "text": "what's your favorite color in this image", }, ] - image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + image_url = "https://raw.githubusercontent.com/langchain-ai/docs/4d11d08b6b0e210bd456943f7a22febbd168b543/src/images/agentic-rag-output.png" image_data = b64encode(httpx.get(image_url).content).decode("utf-8") human_content.append( { "type": "image", "source": { "type": "base64", - "media_type": "image/jpeg", + "media_type": "image/png", "data": image_data, }, }, @@ -1097,7 +1098,7 @@ def test_image_tool_calling() -> None: {"type": "text", "text": "Hmm let me think about that"}, { "type": "tool_use", - "input": {"fav_color": "green"}, + "input": {"fav_color": "purple"}, "id": "foo", "name": "color_picker", }, @@ -1111,7 +1112,7 @@ def test_image_tool_calling() -> None: "content": [ { "type": "text", - "text": "green is a great pick! that's my sister's favorite color", # noqa: E501 + "text": "purple is a great pick! that's my sister's favorite color", # noqa: E501 }, ], "is_error": False, @@ -1120,8 +1121,8 @@ def test_image_tool_calling() -> None: ], ), ] - llm = ChatAnthropic(model="claude-3-5-haiku-latest") # type: ignore[call-arg] - llm.bind_tools([color_picker]).invoke(messages) + llm = ChatAnthropic(model=MODEL_NAME) # type: ignore[call-arg] + _ = llm.bind_tools([color_picker]).invoke(messages) @pytest.mark.default_cassette("test_web_search.yaml.gz") @@ -1129,7 +1130,7 @@ def test_image_tool_calling() -> None: @pytest.mark.parametrize("output_version", ["v0", "v1"]) def test_web_search(output_version: Literal["v0", "v1"]) -> None: llm = ChatAnthropic( - model="claude-3-5-haiku-latest", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] max_tokens=1024, output_version=output_version, ) @@ -1178,68 +1179,6 @@ def test_web_search(output_version: Literal["v0", "v1"]) -> None: ) -@pytest.mark.default_cassette("test_web_fetch_v1.yaml.gz") -@pytest.mark.vcr -@pytest.mark.parametrize("output_version", ["v0", "v1"]) -def test_web_fetch_v1(output_version: Literal["v0", "v1"]) -> None: - """Test that http calls are unchanged between v0 and v1.""" - llm = ChatAnthropic( - model="claude-3-5-haiku-latest", # type: ignore[call-arg] - betas=["web-fetch-2025-09-10"], - output_version=output_version, - ) - - if output_version == "v0": - call_key = "server_tool_use" - result_key = "web_fetch_tool_result" - else: - # v1 - call_key = "server_tool_call" - result_key = "server_tool_result" - - tool = { - "type": "web_fetch_20250910", - "name": "web_fetch", - "max_uses": 1, - "citations": {"enabled": True}, - } - llm_with_tools = llm.bind_tools([tool]) - - input_message = { - "role": "user", - "content": [ - { - "type": "text", - "text": "Fetch the content at https://docs.langchain.com and analyze", - }, - ], - } - response = llm_with_tools.invoke([input_message]) - assert all(isinstance(block, dict) for block in response.content) - block_types = {block["type"] for block in response.content} # type: ignore[index] - assert block_types == {"text", call_key, result_key} - - # Test streaming - full: BaseMessageChunk | None = None - for chunk in 
llm_with_tools.stream([input_message]): - assert isinstance(chunk, AIMessageChunk) - full = chunk if full is None else full + chunk - - assert isinstance(full, AIMessageChunk) - assert isinstance(full.content, list) - block_types = {block["type"] for block in full.content} # type: ignore[index] - assert block_types == {"text", call_key, result_key} - - # Test we can pass back in - next_message = { - "role": "user", - "content": "What does the site you just fetched say about models?", - } - _ = llm_with_tools.invoke( - [input_message, full, next_message], - ) - - @pytest.mark.vcr def test_web_fetch() -> None: """Note: this is a beta feature. @@ -1247,7 +1186,7 @@ def test_web_fetch() -> None: TODO: Update to remove beta once it's generally available. """ llm = ChatAnthropic( - model="claude-3-5-haiku-latest", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] max_tokens=1024, betas=["web-fetch-2025-09-10"], ) @@ -1418,7 +1357,7 @@ def test_web_fetch() -> None: "role": "user", "content": ( "Fetch https://docs.langchain.com and then try to fetch " - "https://python.langchain.com" + "https://langchain.com" ), } max_uses_response = llm_with_tools.invoke([multi_fetch_message]) @@ -1502,6 +1441,68 @@ def test_web_fetch() -> None: assert "data" in content["content"]["source"] +@pytest.mark.default_cassette("test_web_fetch_v1.yaml.gz") +@pytest.mark.vcr +@pytest.mark.parametrize("output_version", ["v0", "v1"]) +def test_web_fetch_v1(output_version: Literal["v0", "v1"]) -> None: + """Test that http calls are unchanged between v0 and v1.""" + llm = ChatAnthropic( + model=MODEL_NAME, # type: ignore[call-arg] + betas=["web-fetch-2025-09-10"], + output_version=output_version, + ) + + if output_version == "v0": + call_key = "server_tool_use" + result_key = "web_fetch_tool_result" + else: + # v1 + call_key = "server_tool_call" + result_key = "server_tool_result" + + tool = { + "type": "web_fetch_20250910", + "name": "web_fetch", + "max_uses": 1, + "citations": {"enabled": True}, + } + llm_with_tools = llm.bind_tools([tool]) + + input_message = { + "role": "user", + "content": [ + { + "type": "text", + "text": "Fetch the content at https://docs.langchain.com and analyze", + }, + ], + } + response = llm_with_tools.invoke([input_message]) + assert all(isinstance(block, dict) for block in response.content) + block_types = {block["type"] for block in response.content} # type: ignore[index] + assert block_types == {"text", call_key, result_key} + + # Test streaming + full: BaseMessageChunk | None = None + for chunk in llm_with_tools.stream([input_message]): + assert isinstance(chunk, AIMessageChunk) + full = chunk if full is None else full + chunk + + assert isinstance(full, AIMessageChunk) + assert isinstance(full.content, list) + block_types = {block["type"] for block in full.content} # type: ignore[index] + assert block_types == {"text", call_key, result_key} + + # Test we can pass back in + next_message = { + "role": "user", + "content": "What does the site you just fetched say about models?", + } + _ = llm_with_tools.invoke( + [input_message, full, next_message], + ) + + @pytest.mark.vcr @pytest.mark.parametrize("output_version", ["v0", "v1"]) def test_code_execution(output_version: Literal["v0", "v1"]) -> None: @@ -1510,9 +1511,8 @@ def test_code_execution(output_version: Literal["v0", "v1"]) -> None: TODO: Update to remove beta once generally available. 
""" llm = ChatAnthropic( - model="claude-sonnet-4-20250514", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] betas=["code-execution-2025-05-22"], - max_tokens=10_000, # type: ignore[call-arg] output_version=output_version, ) @@ -1581,7 +1581,7 @@ def test_remote_mcp(output_version: Literal["v0", "v1"]) -> None: ] llm = ChatAnthropic( - model="claude-sonnet-4-20250514", # type: ignore[call-arg] + model="claude-sonnet-4-5-20250929", # type: ignore[call-arg] betas=["mcp-client-2025-04-04"], mcp_servers=mcp_servers, max_tokens=10_000, # type: ignore[call-arg] @@ -1642,7 +1642,7 @@ def test_files_api_image(block_format: str) -> None: if not image_file_id: pytest.skip() llm = ChatAnthropic( - model="claude-sonnet-4-20250514", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] betas=["files-api-2025-04-14"], ) if block_format == "anthropic": @@ -1679,7 +1679,7 @@ def test_files_api_pdf(block_format: str) -> None: if not pdf_file_id: pytest.skip() llm = ChatAnthropic( - model="claude-sonnet-4-20250514", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] betas=["files-api-2025-04-14"], ) if block_format == "anthropic": @@ -1700,10 +1700,11 @@ def test_files_api_pdf(block_format: str) -> None: _ = llm.invoke([input_message]) +@pytest.mark.vcr def test_search_result_tool_message() -> None: """Test that we can pass a search result tool message to the model.""" llm = ChatAnthropic( - model="claude-3-5-haiku-latest", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] ) @tool @@ -1760,7 +1761,7 @@ def test_search_result_tool_message() -> None: def test_search_result_top_level() -> None: llm = ChatAnthropic( - model="claude-3-5-haiku-latest", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] ) input_message = HumanMessage( [ @@ -1854,6 +1855,6 @@ def test_context_management() -> None: def test_async_shared_client() -> None: - llm = ChatAnthropic(model="claude-3-5-haiku-latest") # type: ignore[call-arg] + llm = ChatAnthropic(model=MODEL_NAME) # type: ignore[call-arg] _ = asyncio.run(llm.ainvoke("Hello")) _ = asyncio.run(llm.ainvoke("Hello")) diff --git a/libs/partners/anthropic/tests/integration_tests/test_llms.py b/libs/partners/anthropic/tests/integration_tests/test_llms.py index 1e8a4332350..2bb82797442 100644 --- a/libs/partners/anthropic/tests/integration_tests/test_llms.py +++ b/libs/partners/anthropic/tests/integration_tests/test_llms.py @@ -9,7 +9,7 @@ from langchain_core.outputs import LLMResult from langchain_anthropic import AnthropicLLM from tests.unit_tests._utils import FakeCallbackHandler -MODEL = "claude-3-7-sonnet-latest" +MODEL = "claude-sonnet-4-5-20250929" @pytest.mark.requires("anthropic") diff --git a/libs/partners/anthropic/tests/integration_tests/test_standard.py b/libs/partners/anthropic/tests/integration_tests/test_standard.py index c9c0492e123..91b2a86ac0a 100644 --- a/libs/partners/anthropic/tests/integration_tests/test_standard.py +++ b/libs/partners/anthropic/tests/integration_tests/test_standard.py @@ -11,11 +11,11 @@ from langchain_anthropic import ChatAnthropic REPO_ROOT_DIR = Path(__file__).parents[5] -MODEL = "claude-3-5-haiku-latest" +MODEL = "claude-3-5-haiku-20241022" class TestAnthropicStandard(ChatModelIntegrationTests): - """Use the standard ChatModel integration tests against the ChatAnthropic class.""" + """Use standard chat model integration tests against the `ChatAnthropic` class.""" @property def chat_model_class(self) -> type[BaseChatModel]: diff --git 
a/libs/partners/anthropic/tests/unit_tests/middleware/__init__.py b/libs/partners/anthropic/tests/unit_tests/middleware/__init__.py new file mode 100644 index 00000000000..89800395de1 --- /dev/null +++ b/libs/partners/anthropic/tests/unit_tests/middleware/__init__.py @@ -0,0 +1 @@ +"""Tests for Anthropic middleware.""" diff --git a/libs/partners/anthropic/tests/unit_tests/middleware/test_anthropic_tools.py b/libs/partners/anthropic/tests/unit_tests/middleware/test_anthropic_tools.py new file mode 100644 index 00000000000..85d824d93da --- /dev/null +++ b/libs/partners/anthropic/tests/unit_tests/middleware/test_anthropic_tools.py @@ -0,0 +1,281 @@ +"""Unit tests for Anthropic text editor and memory tool middleware.""" + +import pytest +from langchain_core.messages import ToolMessage +from langgraph.types import Command + +from langchain_anthropic.middleware.anthropic_tools import ( + AnthropicToolsState, + StateClaudeMemoryMiddleware, + StateClaudeTextEditorMiddleware, + _validate_path, +) + + +class TestPathValidation: + """Test path validation and security.""" + + def test_basic_path_normalization(self) -> None: + """Test basic path normalization.""" + assert _validate_path("/foo/bar") == "/foo/bar" + assert _validate_path("foo/bar") == "/foo/bar" + assert _validate_path("/foo//bar") == "/foo/bar" + assert _validate_path("/foo/./bar") == "/foo/bar" + + def test_path_traversal_blocked(self) -> None: + """Test that path traversal attempts are blocked.""" + with pytest.raises(ValueError, match="Path traversal not allowed"): + _validate_path("/foo/../etc/passwd") + + with pytest.raises(ValueError, match="Path traversal not allowed"): + _validate_path("../etc/passwd") + + with pytest.raises(ValueError, match="Path traversal not allowed"): + _validate_path("~/.ssh/id_rsa") + + def test_allowed_prefixes(self) -> None: + """Test path prefix validation.""" + # Should pass + assert ( + _validate_path("/workspace/file.txt", allowed_prefixes=["/workspace"]) + == "/workspace/file.txt" + ) + + # Should fail + with pytest.raises(ValueError, match="Path must start with"): + _validate_path("/etc/passwd", allowed_prefixes=["/workspace"]) + + with pytest.raises(ValueError, match="Path must start with"): + _validate_path( + "/workspacemalicious/file.txt", allowed_prefixes=["/workspace/"] + ) + + def test_memories_prefix(self) -> None: + """Test /memories prefix validation for memory tools.""" + assert ( + _validate_path("/memories/notes.txt", allowed_prefixes=["/memories"]) + == "/memories/notes.txt" + ) + + with pytest.raises(ValueError, match="Path must start with"): + _validate_path("/other/notes.txt", allowed_prefixes=["/memories"]) + + +class TestTextEditorMiddleware: + """Test text editor middleware functionality.""" + + def test_middleware_initialization(self) -> None: + """Test middleware initializes correctly.""" + middleware = StateClaudeTextEditorMiddleware() + assert middleware.state_schema == AnthropicToolsState + assert middleware.tool_type == "text_editor_20250728" + assert middleware.tool_name == "str_replace_based_edit_tool" + assert middleware.state_key == "text_editor_files" + + # With path restrictions + middleware = StateClaudeTextEditorMiddleware( + allowed_path_prefixes=["/workspace"] + ) + assert middleware.allowed_prefixes == ["/workspace"] + + +class TestMemoryMiddleware: + """Test memory middleware functionality.""" + + def test_middleware_initialization(self) -> None: + """Test middleware initializes correctly.""" + middleware = StateClaudeMemoryMiddleware() + assert 
middleware.state_schema == AnthropicToolsState + assert middleware.tool_type == "memory_20250818" + assert middleware.tool_name == "memory" + assert middleware.state_key == "memory_files" + assert middleware.system_prompt # Should have default prompt + + def test_custom_system_prompt(self) -> None: + """Test custom system prompt can be set.""" + custom_prompt = "Custom memory instructions" + middleware = StateClaudeMemoryMiddleware(system_prompt=custom_prompt) + assert middleware.system_prompt == custom_prompt + + +class TestFileOperations: + """Test file operation implementations via wrap_tool_call.""" + + def test_view_operation(self) -> None: + """Test view command execution.""" + middleware = StateClaudeTextEditorMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/test.txt": { + "content": ["line1", "line2", "line3"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + } + }, + } + + args = {"command": "view", "path": "/test.txt"} + result = middleware._handle_view(args, state, "test_id") + + assert isinstance(result, Command) + assert result.update is not None + messages = result.update.get("messages", []) + assert len(messages) == 1 + assert isinstance(messages[0], ToolMessage) + assert messages[0].content == "1|line1\n2|line2\n3|line3" + assert messages[0].tool_call_id == "test_id" + + def test_create_operation(self) -> None: + """Test create command execution.""" + middleware = StateClaudeTextEditorMiddleware() + + state: AnthropicToolsState = {"messages": []} + + args = {"command": "create", "path": "/test.txt", "file_text": "line1\nline2"} + result = middleware._handle_create(args, state, "test_id") + + assert isinstance(result, Command) + assert result.update is not None + files = result.update.get("text_editor_files", {}) + assert "/test.txt" in files + assert files["/test.txt"]["content"] == ["line1", "line2"] + + def test_path_prefix_enforcement(self) -> None: + """Test that path prefixes are enforced.""" + middleware = StateClaudeTextEditorMiddleware( + allowed_path_prefixes=["/workspace"] + ) + + state: AnthropicToolsState = {"messages": []} + + # Should fail with /etc/passwd + args = {"command": "create", "path": "/etc/passwd", "file_text": "test"} + + with pytest.raises(ValueError, match="Path must start with"): + middleware._handle_create(args, state, "test_id") + + def test_memories_prefix_enforcement(self) -> None: + """Test that /memories prefix is enforced for memory middleware.""" + middleware = StateClaudeMemoryMiddleware() + + state: AnthropicToolsState = {"messages": []} + + # Should fail with /other/path + args = {"command": "create", "path": "/other/path.txt", "file_text": "test"} + + with pytest.raises(ValueError, match="/memories"): + middleware._handle_create(args, state, "test_id") + + def test_str_replace_operation(self) -> None: + """Test str_replace command execution.""" + middleware = StateClaudeTextEditorMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/test.txt": { + "content": ["Hello world", "Goodbye world"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + } + }, + } + + args = { + "command": "str_replace", + "path": "/test.txt", + "old_str": "world", + "new_str": "universe", + } + result = middleware._handle_str_replace(args, state, "test_id") + + assert isinstance(result, Command) + assert result.update is not None + files = result.update.get("text_editor_files", {}) + # Should only replace first 
occurrence + assert files["/test.txt"]["content"] == ["Hello universe", "Goodbye world"] + + def test_insert_operation(self) -> None: + """Test insert command execution.""" + middleware = StateClaudeTextEditorMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/test.txt": { + "content": ["line1", "line2"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + } + }, + } + + args = { + "command": "insert", + "path": "/test.txt", + "insert_line": 0, + "new_str": "inserted", + } + result = middleware._handle_insert(args, state, "test_id") + + assert isinstance(result, Command) + assert result.update is not None + files = result.update.get("text_editor_files", {}) + assert files["/test.txt"]["content"] == ["inserted", "line1", "line2"] + + def test_delete_operation(self) -> None: + """Test delete command execution (memory only).""" + middleware = StateClaudeMemoryMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "memory_files": { + "/memories/test.txt": { + "content": ["line1"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + } + }, + } + + args = {"command": "delete", "path": "/memories/test.txt"} + result = middleware._handle_delete(args, state, "test_id") + + assert isinstance(result, Command) + assert result.update is not None + files = result.update.get("memory_files", {}) + # Deleted files are marked as None in state + assert files.get("/memories/test.txt") is None + + def test_rename_operation(self) -> None: + """Test rename command execution (memory only).""" + middleware = StateClaudeMemoryMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "memory_files": { + "/memories/old.txt": { + "content": ["line1"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + } + }, + } + + args = { + "command": "rename", + "old_path": "/memories/old.txt", + "new_path": "/memories/new.txt", + } + result = middleware._handle_rename(args, state, "test_id") + + assert isinstance(result, Command) + assert result.update is not None + files = result.update.get("memory_files", {}) + # Old path is marked as None (deleted) + assert files.get("/memories/old.txt") is None + # New path has the file data + assert files.get("/memories/new.txt") is not None + assert files["/memories/new.txt"]["content"] == ["line1"] diff --git a/libs/partners/anthropic/tests/unit_tests/middleware/test_bash.py b/libs/partners/anthropic/tests/unit_tests/middleware/test_bash.py new file mode 100644 index 00000000000..ca47a241dfa --- /dev/null +++ b/libs/partners/anthropic/tests/unit_tests/middleware/test_bash.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from unittest.mock import MagicMock + +import pytest +from langchain_core.messages.tool import ToolCall + +pytest.importorskip( + "anthropic", reason="Anthropic SDK is required for Claude middleware tests" +) + +from langchain.agents.middleware.types import ToolCallRequest +from langchain_core.messages import ToolMessage + +from langchain_anthropic.middleware.bash import ClaudeBashToolMiddleware + + +def test_wrap_tool_call_handles_claude_bash(monkeypatch: pytest.MonkeyPatch) -> None: + middleware = ClaudeBashToolMiddleware() + sentinel = ToolMessage(content="ok", tool_call_id="call-1", name="bash") + + monkeypatch.setattr(middleware, "_run_shell_tool", MagicMock(return_value=sentinel)) + monkeypatch.setattr( + middleware, "_ensure_resources", MagicMock(return_value=MagicMock()) + ) + + tool_call: ToolCall = { + 
"name": "bash", + "args": {"command": "echo hi"}, + "id": "call-1", + } + request = ToolCallRequest( + tool_call=tool_call, + tool=MagicMock(), + state={}, + runtime=None, # type: ignore[arg-type] + ) + + handler_called = False + + def handler(_: ToolCallRequest) -> ToolMessage: + nonlocal handler_called + handler_called = True + return ToolMessage(content="should not be used", tool_call_id="call-1") + + result = middleware.wrap_tool_call(request, handler) + assert result is sentinel + assert handler_called is False + + +def test_wrap_tool_call_passes_through_other_tools( + monkeypatch: pytest.MonkeyPatch, +) -> None: + middleware = ClaudeBashToolMiddleware() + tool_call: ToolCall = {"name": "other", "args": {}, "id": "call-2"} + request = ToolCallRequest( + tool_call=tool_call, + tool=MagicMock(), + state={}, + runtime=None, # type: ignore[arg-type] + ) + + sentinel = ToolMessage(content="handled", tool_call_id="call-2", name="other") + + def handler(_: ToolCallRequest) -> ToolMessage: + return sentinel + + result = middleware.wrap_tool_call(request, handler) + assert result is sentinel diff --git a/libs/partners/anthropic/tests/unit_tests/middleware/test_file_search.py b/libs/partners/anthropic/tests/unit_tests/middleware/test_file_search.py new file mode 100644 index 00000000000..395a368034c --- /dev/null +++ b/libs/partners/anthropic/tests/unit_tests/middleware/test_file_search.py @@ -0,0 +1,493 @@ +"""Unit tests for file search middleware.""" + +from langchain_anthropic.middleware.anthropic_tools import AnthropicToolsState +from langchain_anthropic.middleware.file_search import ( + StateFileSearchMiddleware, +) + + +class TestSearchMiddlewareInitialization: + """Test search middleware initialization.""" + + def test_middleware_initialization(self) -> None: + """Test middleware initializes correctly.""" + middleware = StateFileSearchMiddleware() + assert middleware.state_schema == AnthropicToolsState + assert middleware.state_key == "text_editor_files" + + def test_custom_state_key(self) -> None: + """Test middleware with custom state key.""" + middleware = StateFileSearchMiddleware(state_key="memory_files") + assert middleware.state_key == "memory_files" + + +class TestGlobSearch: + """Test Glob file pattern matching.""" + + def test_glob_basic_pattern(self) -> None: + """Test basic glob pattern matching.""" + middleware = StateFileSearchMiddleware() + + test_state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": ["print('hello')"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/src/utils.py": { + "content": ["def helper(): pass"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/README.md": { + "content": ["# Project"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + # Call tool function directly (state is injected in real usage) + result = middleware.glob_search.func(pattern="*.py", state=test_state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert "/src/main.py" in result + assert "/src/utils.py" in result + assert "/README.md" not in result + + def test_glob_recursive_pattern(self) -> None: + """Test recursive glob pattern matching.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + 
"/src/utils/helper.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/tests/test_main.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.glob_search.func(pattern="**/*.py", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + lines = result.split("\n") + assert len(lines) == 3 + assert all(".py" in line for line in lines) + + def test_glob_with_base_path(self) -> None: + """Test glob with base path restriction.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/tests/test.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.glob_search.func( # type: ignore[attr-defined] + pattern="**/*.py", path="/src", state=state + ) + + assert isinstance(result, str) + assert "/src/main.py" in result + assert "/tests/test.py" not in result + + def test_glob_no_matches(self) -> None: + """Test glob with no matching files.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.glob_search.func(pattern="*.ts", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert result == "No files found" + + def test_glob_sorts_by_modified_time(self) -> None: + """Test that glob results are sorted by modification time.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/old.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/new.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-02T00:00:00", + }, + }, + } + + result = middleware.glob_search.func(pattern="*.py", state=state) # type: ignore[attr-defined] + + lines = result.split("\n") + # Most recent first + assert lines[0] == "/new.py" + assert lines[1] == "/old.py" + + +class TestGrepSearch: + """Test Grep content search.""" + + def test_grep_files_with_matches_mode(self) -> None: + """Test grep with files_with_matches output mode.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": ["def foo():", " pass"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/src/utils.py": { + "content": ["def bar():", " return None"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/README.md": { + "content": ["# Documentation", "No code here"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.grep_search.func(pattern=r"def \w+\(\):", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert "/src/main.py" in result + assert "/src/utils.py" in result + assert "/README.md" not in result + # Should only have file paths, not line content + + def test_grep_invalid_include_pattern(self) -> None: + """Return error when include glob is invalid.""" 
+        middleware = StateFileSearchMiddleware()
+
+        state: AnthropicToolsState = {
+            "messages": [],
+            "text_editor_files": {
+                "/src/main.py": {
+                    "content": ["def foo():"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                }
+            },
+        }
+
+        result = middleware.grep_search.func(  # type: ignore[attr-defined]
+            pattern=r"def", include="*.{py", state=state
+        )
+
+        assert result == "Invalid include pattern"
+
+
+class TestGrepSearchOutputModes:
+    """Test grep output modes, include filters, and error handling (state-backed)."""
+
+    def test_grep_content_mode(self) -> None:
+        """Test grep with content output mode."""
+        middleware = StateFileSearchMiddleware()
+
+        state: AnthropicToolsState = {
+            "messages": [],
+            "text_editor_files": {
+                "/src/main.py": {
+                    "content": ["def foo():", "    pass", "def bar():"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+            },
+        }
+
+        result = middleware.grep_search.func(  # type: ignore[attr-defined]
+            pattern=r"def \w+\(\):", output_mode="content", state=state
+        )
+
+        assert isinstance(result, str)
+        lines = result.split("\n")
+        assert len(lines) == 2
+        assert lines[0] == "/src/main.py:1:def foo():"
+        assert lines[1] == "/src/main.py:3:def bar():"
+
+    def test_grep_count_mode(self) -> None:
+        """Test grep with count output mode."""
+        middleware = StateFileSearchMiddleware()
+
+        state: AnthropicToolsState = {
+            "messages": [],
+            "text_editor_files": {
+                "/src/main.py": {
+                    "content": ["TODO: fix this", "print('hello')", "TODO: add tests"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+                "/src/utils.py": {
+                    "content": ["TODO: implement"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+            },
+        }
+
+        result = middleware.grep_search.func(  # type: ignore[attr-defined]
+            pattern=r"TODO", output_mode="count", state=state
+        )
+
+        assert isinstance(result, str)
+        lines = result.split("\n")
+        assert "/src/main.py:2" in lines
+        assert "/src/utils.py:1" in lines
+
+    def test_grep_with_include_filter(self) -> None:
+        """Test grep with include file pattern filter."""
+        middleware = StateFileSearchMiddleware()
+
+        state: AnthropicToolsState = {
+            "messages": [],
+            "text_editor_files": {
+                "/src/main.py": {
+                    "content": ["import os"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+                "/src/main.ts": {
+                    "content": ["import os from 'os'"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+            },
+        }
+
+        result = middleware.grep_search.func(  # type: ignore[attr-defined]
+            pattern="import", include="*.py", state=state
+        )
+
+        assert isinstance(result, str)
+        assert "/src/main.py" in result
+        assert "/src/main.ts" not in result
+
+    def test_grep_with_brace_expansion_filter(self) -> None:
+        """Test grep with brace expansion in include filter."""
+        middleware = StateFileSearchMiddleware()
+
+        state: AnthropicToolsState = {
+            "messages": [],
+            "text_editor_files": {
+                "/src/main.ts": {
+                    "content": ["const x = 1"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+                "/src/App.tsx": {
+                    "content": ["const y = 2"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+                "/src/main.py": {
+                    "content": ["z = 3"],
+                    "created_at": "2025-01-01T00:00:00",
+                    "modified_at": "2025-01-01T00:00:00",
+                },
+            },
+        }
+
+        result = middleware.grep_search.func(  # type: ignore[attr-defined]
+            pattern="const", include="*.{ts,tsx}", state=state
+        )
+
+        assert isinstance(result, str)
+ assert "/src/main.ts" in result + assert "/src/App.tsx" in result + assert "/src/main.py" not in result + + def test_grep_with_base_path(self) -> None: + """Test grep with base path restriction.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": ["import foo"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + "/tests/test.py": { + "content": ["import foo"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.grep_search.func(pattern="import", path="/src", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert "/src/main.py" in result + assert "/tests/test.py" not in result + + def test_grep_no_matches(self) -> None: + """Test grep with no matching content.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": ["print('hello')"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.grep_search.func(pattern=r"TODO", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert result == "No matches found" + + def test_grep_invalid_regex(self) -> None: + """Test grep with invalid regex pattern.""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": {}, + } + + result = middleware.grep_search.func(pattern=r"[unclosed", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert "Invalid regex pattern" in result + + +class TestSearchWithDifferentBackends: + """Test searching with different backend configurations.""" + + def test_glob_default_backend(self) -> None: + """Test that glob searches the default backend (text_editor_files).""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + "memory_files": { + "/memories/notes.txt": { + "content": [], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.glob_search.func(pattern="**/*", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert "/src/main.py" in result + # Should NOT find memory_files since default backend is text_editor_files + assert "/memories/notes.txt" not in result + + def test_grep_default_backend(self) -> None: + """Test that grep searches the default backend (text_editor_files).""" + middleware = StateFileSearchMiddleware() + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": ["TODO: implement"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + "memory_files": { + "/memories/tasks.txt": { + "content": ["TODO: review"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.grep_search.func(pattern=r"TODO", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert "/src/main.py" in result + # Should NOT find memory_files since default backend is text_editor_files + assert "/memories/tasks.txt" not in result + + def 
test_search_with_single_store(self) -> None: + """Test searching with a specific state key.""" + middleware = StateFileSearchMiddleware(state_key="text_editor_files") + + state: AnthropicToolsState = { + "messages": [], + "text_editor_files": { + "/src/main.py": { + "content": ["code"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + "memory_files": { + "/memories/notes.txt": { + "content": ["notes"], + "created_at": "2025-01-01T00:00:00", + "modified_at": "2025-01-01T00:00:00", + }, + }, + } + + result = middleware.grep_search.func(pattern=r".*", state=state) # type: ignore[attr-defined] + + assert isinstance(result, str) + assert "/src/main.py" in result + assert "/memories/notes.txt" not in result diff --git a/libs/partners/anthropic/tests/unit_tests/middleware/test_prompt_caching.py b/libs/partners/anthropic/tests/unit_tests/middleware/test_prompt_caching.py new file mode 100644 index 00000000000..e06ebebaf53 --- /dev/null +++ b/libs/partners/anthropic/tests/unit_tests/middleware/test_prompt_caching.py @@ -0,0 +1,311 @@ +"""Tests for Anthropic prompt caching middleware.""" + +import warnings +from typing import Any, cast +from unittest.mock import MagicMock + +import pytest +from langchain.agents.middleware.types import ModelRequest, ModelResponse +from langchain_core.callbacks import ( + AsyncCallbackManagerForLLMRun, + CallbackManagerForLLMRun, +) +from langchain_core.language_models import BaseChatModel +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, ChatResult +from langgraph.runtime import Runtime + +from langchain_anthropic.chat_models import ChatAnthropic +from langchain_anthropic.middleware import AnthropicPromptCachingMiddleware + + +class FakeToolCallingModel(BaseChatModel): + """Fake model for testing middleware.""" + + def _generate( + self, + messages: list[BaseMessage], + stop: list[str] | None = None, + run_manager: CallbackManagerForLLMRun | None = None, + **kwargs: Any, + ) -> ChatResult: + """Top Level call""" + messages_string = "-".join([str(m.content) for m in messages]) + message = AIMessage(content=messages_string, id="0") + return ChatResult(generations=[ChatGeneration(message=message)]) + + async def _agenerate( + self, + messages: list[BaseMessage], + stop: list[str] | None = None, + run_manager: AsyncCallbackManagerForLLMRun | None = None, + **kwargs: Any, + ) -> ChatResult: + """Async top level call""" + messages_string = "-".join([str(m.content) for m in messages]) + message = AIMessage(content=messages_string, id="0") + return ChatResult(generations=[ChatGeneration(message=message)]) + + @property + def _llm_type(self) -> str: + return "fake-tool-call-model" + + +def test_anthropic_prompt_caching_middleware_initialization() -> None: + """Test AnthropicPromptCachingMiddleware initialization.""" + # Test with custom values + middleware = AnthropicPromptCachingMiddleware( + type="ephemeral", ttl="1h", min_messages_to_cache=5 + ) + assert middleware.type == "ephemeral" + assert middleware.ttl == "1h" + assert middleware.min_messages_to_cache == 5 + + # Test with default values + middleware = AnthropicPromptCachingMiddleware() + assert middleware.type == "ephemeral" + assert middleware.ttl == "5m" + assert middleware.min_messages_to_cache == 0 + + # Create a mock ChatAnthropic instance + mock_chat_anthropic = MagicMock(spec=ChatAnthropic) + + fake_request = ModelRequest( + model=mock_chat_anthropic, + messages=[HumanMessage("Hello")], + 
system_prompt=None, + tool_choice=None, + tools=[], + response_format=None, + state={"messages": [HumanMessage("Hello")]}, + runtime=cast(Runtime, object()), + model_settings={}, + ) + + def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="mock response")]) + + middleware.wrap_model_call(fake_request, mock_handler) + # Check that model_settings were passed through via the request + assert fake_request.model_settings == { + "cache_control": {"type": "ephemeral", "ttl": "5m"} + } + + +def test_anthropic_prompt_caching_middleware_unsupported_model() -> None: + """Test AnthropicPromptCachingMiddleware with unsupported model.""" + fake_request = ModelRequest( + model=FakeToolCallingModel(), + messages=[HumanMessage("Hello")], + system_prompt=None, + tool_choice=None, + tools=[], + response_format=None, + state={"messages": [HumanMessage("Hello")]}, + runtime=cast(Runtime, object()), + model_settings={}, + ) + + middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="raise") + + def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="mock response")]) + + # Since we're in the langchain-anthropic package, ChatAnthropic is always + # available. Test that it raises an error for unsupported model instances + with pytest.raises( + ValueError, + match=( + "AnthropicPromptCachingMiddleware caching middleware only supports " + "Anthropic models, not instances of" + ), + ): + middleware.wrap_model_call(fake_request, mock_handler) + + middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="warn") + + # Test warn behavior for unsupported model instances + with warnings.catch_warnings(record=True) as w: + result = middleware.wrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + assert len(w) == 1 + assert ( + "AnthropicPromptCachingMiddleware caching middleware only supports " + "Anthropic models, not instances of" + ) in str(w[-1].message) + + # Test ignore behavior + middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore") + result = middleware.wrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + + +async def test_anthropic_prompt_caching_middleware_async() -> None: + """Test AnthropicPromptCachingMiddleware async path.""" + # Test with custom values + middleware = AnthropicPromptCachingMiddleware( + type="ephemeral", ttl="1h", min_messages_to_cache=5 + ) + + # Create a mock ChatAnthropic instance + mock_chat_anthropic = MagicMock(spec=ChatAnthropic) + + fake_request = ModelRequest( + model=mock_chat_anthropic, + messages=[HumanMessage("Hello")] * 6, + system_prompt=None, + tool_choice=None, + tools=[], + response_format=None, + state={"messages": [HumanMessage("Hello")] * 6}, + runtime=cast(Runtime, object()), + model_settings={}, + ) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="mock response")]) + + result = await middleware.awrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + # Check that model_settings were passed through via the request + assert fake_request.model_settings == { + "cache_control": {"type": "ephemeral", "ttl": "1h"} + } + + +async def test_anthropic_prompt_caching_middleware_async_unsupported_model() -> None: + """Test AnthropicPromptCachingMiddleware async path with unsupported model.""" + fake_request = ModelRequest( + model=FakeToolCallingModel(), + 
messages=[HumanMessage("Hello")], + system_prompt=None, + tool_choice=None, + tools=[], + response_format=None, + state={"messages": [HumanMessage("Hello")]}, + runtime=cast(Runtime, object()), + model_settings={}, + ) + + middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="raise") + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="mock response")]) + + # Test that it raises an error for unsupported model instances + with pytest.raises( + ValueError, + match=( + "AnthropicPromptCachingMiddleware caching middleware only supports " + "Anthropic models, not instances of" + ), + ): + await middleware.awrap_model_call(fake_request, mock_handler) + + middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="warn") + + # Test warn behavior for unsupported model instances + with warnings.catch_warnings(record=True) as w: + result = await middleware.awrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + assert len(w) == 1 + assert ( + "AnthropicPromptCachingMiddleware caching middleware only supports " + "Anthropic models, not instances of" + ) in str(w[-1].message) + + # Test ignore behavior + middleware = AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore") + result = await middleware.awrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + + +async def test_anthropic_prompt_caching_middleware_async_min_messages() -> None: + """Test async path respects min_messages_to_cache.""" + middleware = AnthropicPromptCachingMiddleware(min_messages_to_cache=5) + + # Test with fewer messages than minimum + fake_request = ModelRequest( + model=FakeToolCallingModel(), + messages=[HumanMessage("Hello")] * 3, + system_prompt=None, + tool_choice=None, + tools=[], + response_format=None, + state={"messages": [HumanMessage("Hello")] * 3}, + runtime=cast(Runtime, object()), + model_settings={}, + ) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="mock response")]) + + result = await middleware.awrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + # Cache control should NOT be added when message count is below minimum + assert fake_request.model_settings == {} + + +async def test_anthropic_prompt_caching_middleware_async_with_system_prompt() -> None: + """Test async path counts system prompt in message count.""" + middleware = AnthropicPromptCachingMiddleware( + type="ephemeral", ttl="1h", min_messages_to_cache=3 + ) + + # Create a mock ChatAnthropic instance + mock_chat_anthropic = MagicMock(spec=ChatAnthropic) + + # Test with system prompt: 2 messages + 1 system = 3 total (meets minimum) + fake_request = ModelRequest( + model=mock_chat_anthropic, + messages=[HumanMessage("Hello"), HumanMessage("World")], + system_prompt="You are a helpful assistant", + tool_choice=None, + tools=[], + response_format=None, + state={"messages": [HumanMessage("Hello"), HumanMessage("World")]}, + runtime=cast(Runtime, object()), + model_settings={}, + ) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="mock response")]) + + result = await middleware.awrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + # Cache control should be added when system prompt pushes count to minimum + assert fake_request.model_settings == { + "cache_control": {"type": 
"ephemeral", "ttl": "1h"} + } + + +async def test_anthropic_prompt_caching_middleware_async_default_values() -> None: + """Test async path with default middleware initialization.""" + # Test with default values (min_messages_to_cache=0) + middleware = AnthropicPromptCachingMiddleware() + + # Create a mock ChatAnthropic instance + mock_chat_anthropic = MagicMock(spec=ChatAnthropic) + + # Single message should trigger caching with default settings + fake_request = ModelRequest( + model=mock_chat_anthropic, + messages=[HumanMessage("Hello")], + system_prompt=None, + tool_choice=None, + tools=[], + response_format=None, + state={"messages": [HumanMessage("Hello")]}, + runtime=cast(Runtime, object()), + model_settings={}, + ) + + async def mock_handler(req: ModelRequest) -> ModelResponse: + return ModelResponse(result=[AIMessage(content="mock response")]) + + result = await middleware.awrap_model_call(fake_request, mock_handler) + assert isinstance(result, ModelResponse) + # Check that model_settings were added with default values + assert fake_request.model_settings == { + "cache_control": {"type": "ephemeral", "ttl": "5m"} + } diff --git a/libs/partners/anthropic/tests/unit_tests/test_chat_models.py b/libs/partners/anthropic/tests/unit_tests/test_chat_models.py index 7aabfe860b5..88f1ab5644a 100644 --- a/libs/partners/anthropic/tests/unit_tests/test_chat_models.py +++ b/libs/partners/anthropic/tests/unit_tests/test_chat_models.py @@ -29,19 +29,21 @@ from langchain_anthropic.chat_models import ( os.environ["ANTHROPIC_API_KEY"] = "foo" +MODEL_NAME = "claude-sonnet-4-5-20250929" + def test_initialization() -> None: """Test chat model initialization.""" for model in [ - ChatAnthropic(model_name="claude-instant-1.2", api_key="xyz", timeout=2), # type: ignore[arg-type, call-arg] + ChatAnthropic(model_name=MODEL_NAME, api_key="xyz", timeout=2), # type: ignore[arg-type, call-arg] ChatAnthropic( # type: ignore[call-arg, call-arg, call-arg] - model="claude-instant-1.2", + model=MODEL_NAME, anthropic_api_key="xyz", default_request_timeout=2, base_url="https://api.anthropic.com", ), ]: - assert model.model == "claude-instant-1.2" + assert model.model == MODEL_NAME assert cast("SecretStr", model.anthropic_api_key).get_secret_value() == "xyz" assert model.default_request_timeout == 2.0 assert model.anthropic_api_url == "https://api.anthropic.com" @@ -49,23 +51,23 @@ def test_initialization() -> None: @pytest.mark.parametrize("async_api", [True, False]) def test_streaming_attribute_should_stream(async_api: bool) -> None: # noqa: FBT001 - llm = ChatAnthropic(model="foo", streaming=True) + llm = ChatAnthropic(model=MODEL_NAME, streaming=True) assert llm._should_stream(async_api=async_api) def test_anthropic_client_caching() -> None: """Test that the OpenAI client is cached.""" - llm1 = ChatAnthropic(model="claude-3-5-sonnet-latest") - llm2 = ChatAnthropic(model="claude-3-5-sonnet-latest") + llm1 = ChatAnthropic(model=MODEL_NAME) + llm2 = ChatAnthropic(model=MODEL_NAME) assert llm1._client._client is llm2._client._client - llm3 = ChatAnthropic(model="claude-3-5-sonnet-latest", base_url="foo") + llm3 = ChatAnthropic(model=MODEL_NAME, base_url="foo") assert llm1._client._client is not llm3._client._client - llm4 = ChatAnthropic(model="claude-3-5-sonnet-latest", timeout=None) + llm4 = ChatAnthropic(model=MODEL_NAME, timeout=None) assert llm1._client._client is llm4._client._client - llm5 = ChatAnthropic(model="claude-3-5-sonnet-latest", timeout=3) + llm5 = ChatAnthropic(model=MODEL_NAME, timeout=3) assert 
llm1._client._client is not llm5._client._client @@ -74,9 +76,7 @@ def test_anthropic_proxy_support() -> None: proxy_url = "http://proxy.example.com:8080" # Test sync client with proxy - llm_sync = ChatAnthropic( - model="claude-3-5-sonnet-latest", anthropic_proxy=proxy_url - ) + llm_sync = ChatAnthropic(model=MODEL_NAME, anthropic_proxy=proxy_url) sync_client = llm_sync._client assert sync_client is not None @@ -85,10 +85,8 @@ def test_anthropic_proxy_support() -> None: assert async_client is not None # Test that clients with different proxy settings are not cached together - llm_no_proxy = ChatAnthropic(model="claude-3-5-sonnet-latest") - llm_with_proxy = ChatAnthropic( - model="claude-3-5-sonnet-latest", anthropic_proxy=proxy_url - ) + llm_no_proxy = ChatAnthropic(model=MODEL_NAME) + llm_with_proxy = ChatAnthropic(model=MODEL_NAME, anthropic_proxy=proxy_url) # Different proxy settings should result in different cached clients assert llm_no_proxy._client._client is not llm_with_proxy._client._client @@ -100,7 +98,7 @@ def test_anthropic_proxy_from_environment() -> None: # Test with environment variable set with patch.dict(os.environ, {"ANTHROPIC_PROXY": proxy_url}): - llm = ChatAnthropic(model="claude-3-5-sonnet-latest") + llm = ChatAnthropic(model=MODEL_NAME) assert llm.anthropic_proxy == proxy_url # Should be able to create clients successfully @@ -112,82 +110,76 @@ def test_anthropic_proxy_from_environment() -> None: # Test that explicit parameter overrides environment variable with patch.dict(os.environ, {"ANTHROPIC_PROXY": "http://env-proxy.com"}): explicit_proxy = "http://explicit-proxy.com" - llm = ChatAnthropic( - model="claude-3-5-sonnet-latest", anthropic_proxy=explicit_proxy - ) + llm = ChatAnthropic(model=MODEL_NAME, anthropic_proxy=explicit_proxy) assert llm.anthropic_proxy == explicit_proxy def test_set_default_max_tokens() -> None: """Test the set_default_max_tokens function.""" + # Test claude-sonnet-4-5 models + llm = ChatAnthropic(model="claude-sonnet-4-5-20250929", anthropic_api_key="test") + assert llm.max_tokens == 64000 + # Test claude-opus-4 models llm = ChatAnthropic(model="claude-opus-4-20250514", anthropic_api_key="test") assert llm.max_tokens == 32000 # Test claude-sonnet-4 models - llm = ChatAnthropic(model="claude-sonnet-4-latest", anthropic_api_key="test") + llm = ChatAnthropic(model="claude-sonnet-4-20250514", anthropic_api_key="test") assert llm.max_tokens == 64000 # Test claude-3-7-sonnet models - llm = ChatAnthropic(model="claude-3-7-sonnet-latest", anthropic_api_key="test") + llm = ChatAnthropic(model="claude-3-7-sonnet-20250219", anthropic_api_key="test") assert llm.max_tokens == 64000 - # Test claude-3-5-sonnet models - llm = ChatAnthropic(model="claude-3-5-sonnet-latest", anthropic_api_key="test") - assert llm.max_tokens == 8192 - # Test claude-3-5-haiku models - llm = ChatAnthropic(model="claude-3-5-haiku-latest", anthropic_api_key="test") + llm = ChatAnthropic(model="claude-3-5-haiku-20241022", anthropic_api_key="test") assert llm.max_tokens == 8192 # Test claude-3-haiku models (should default to 4096) - llm = ChatAnthropic(model="claude-3-haiku-latest", anthropic_api_key="test") + llm = ChatAnthropic(model="claude-3-haiku-20240307", anthropic_api_key="test") assert llm.max_tokens == 4096 # Test that existing max_tokens values are preserved - llm = ChatAnthropic( - model="claude-3-5-sonnet-latest", max_tokens=2048, anthropic_api_key="test" - ) + llm = ChatAnthropic(model=MODEL_NAME, max_tokens=2048, anthropic_api_key="test") assert llm.max_tokens 
== 2048 # Test that explicitly set max_tokens values are preserved - llm = ChatAnthropic( - model="claude-3-5-sonnet-latest", max_tokens=4096, anthropic_api_key="test" - ) + llm = ChatAnthropic(model=MODEL_NAME, max_tokens=4096, anthropic_api_key="test") assert llm.max_tokens == 4096 @pytest.mark.requires("anthropic") def test_anthropic_model_name_param() -> None: - llm = ChatAnthropic(model_name="foo") # type: ignore[call-arg, call-arg] - assert llm.model == "foo" + llm = ChatAnthropic(model_name=MODEL_NAME) # type: ignore[call-arg, call-arg] + assert llm.model == MODEL_NAME @pytest.mark.requires("anthropic") def test_anthropic_model_param() -> None: - llm = ChatAnthropic(model="foo") # type: ignore[call-arg] - assert llm.model == "foo" + llm = ChatAnthropic(model=MODEL_NAME) # type: ignore[call-arg] + assert llm.model == MODEL_NAME @pytest.mark.requires("anthropic") def test_anthropic_model_kwargs() -> None: - llm = ChatAnthropic(model_name="foo", model_kwargs={"foo": "bar"}) # type: ignore[call-arg, call-arg] + llm = ChatAnthropic(model_name=MODEL_NAME, model_kwargs={"foo": "bar"}) # type: ignore[call-arg, call-arg] assert llm.model_kwargs == {"foo": "bar"} @pytest.mark.requires("anthropic") def test_anthropic_fields_in_model_kwargs() -> None: """Test that for backwards compatibility fields can be passed in as model_kwargs.""" - llm = ChatAnthropic(model="foo", model_kwargs={"max_tokens_to_sample": 5}) # type: ignore[call-arg] + llm = ChatAnthropic(model=MODEL_NAME, model_kwargs={"max_tokens_to_sample": 5}) # type: ignore[call-arg] assert llm.max_tokens == 5 - llm = ChatAnthropic(model="foo", model_kwargs={"max_tokens": 5}) # type: ignore[call-arg] + llm = ChatAnthropic(model=MODEL_NAME, model_kwargs={"max_tokens": 5}) # type: ignore[call-arg] assert llm.max_tokens == 5 @pytest.mark.requires("anthropic") def test_anthropic_incorrect_field() -> None: with pytest.warns(match="not default parameter"): - llm = ChatAnthropic(model="foo", foo="bar") # type: ignore[call-arg, call-arg] + llm = ChatAnthropic(model=MODEL_NAME, foo="bar") # type: ignore[call-arg, call-arg] assert llm.model_kwargs == {"foo": "bar"} @@ -196,7 +188,7 @@ def test_anthropic_initialization() -> None: """Test anthropic initialization.""" # Verify that chat anthropic can be initialized using a secret key provided # as a parameter rather than an environment variable. 
- ChatAnthropic(model="test", anthropic_api_key="test") # type: ignore[call-arg, call-arg] + ChatAnthropic(model=MODEL_NAME, anthropic_api_key="test") # type: ignore[call-arg, call-arg] def test__format_output() -> None: @@ -220,7 +212,7 @@ def test__format_output() -> None: }, response_metadata={"model_provider": "anthropic"}, ) - llm = ChatAnthropic(model="test", anthropic_api_key="test") # type: ignore[call-arg, call-arg] + llm = ChatAnthropic(model=MODEL_NAME, anthropic_api_key="test") # type: ignore[call-arg, call-arg] actual = llm._format_output(anthropic_msg) assert actual.generations[0].message == expected @@ -252,7 +244,7 @@ def test__format_output_cached() -> None: response_metadata={"model_provider": "anthropic"}, ) - llm = ChatAnthropic(model="test", anthropic_api_key="test") # type: ignore[call-arg, call-arg] + llm = ChatAnthropic(model=MODEL_NAME, anthropic_api_key="test") # type: ignore[call-arg, call-arg] actual = llm._format_output(anthropic_msg) assert actual.generations[0].message == expected @@ -1222,7 +1214,7 @@ def test__format_messages_with_multiple_system() -> None: def test_anthropic_api_key_is_secret_string() -> None: """Test that the API key is stored as a SecretStr.""" chat_model = ChatAnthropic( # type: ignore[call-arg, call-arg] - model="claude-3-opus-20240229", + model=MODEL_NAME, anthropic_api_key="secret-api-key", ) assert isinstance(chat_model.anthropic_api_key, SecretStr) @@ -1235,7 +1227,7 @@ def test_anthropic_api_key_masked_when_passed_from_env( """Test that the API key is masked when passed from an environment variable.""" monkeypatch.setenv("ANTHROPIC_API_KEY ", "secret-api-key") chat_model = ChatAnthropic( # type: ignore[call-arg] - model="claude-3-opus-20240229", + model=MODEL_NAME, ) print(chat_model.anthropic_api_key, end="") # noqa: T201 captured = capsys.readouterr() @@ -1248,7 +1240,7 @@ def test_anthropic_api_key_masked_when_passed_via_constructor( ) -> None: """Test that the API key is masked when passed via the constructor.""" chat_model = ChatAnthropic( # type: ignore[call-arg, call-arg] - model="claude-3-opus-20240229", + model=MODEL_NAME, anthropic_api_key="secret-api-key", ) print(chat_model.anthropic_api_key, end="") # noqa: T201 @@ -1260,7 +1252,7 @@ def test_anthropic_api_key_masked_when_passed_via_constructor( def test_anthropic_uses_actual_secret_value_from_secretstr() -> None: """Test that the actual secret value is correctly retrieved.""" chat_model = ChatAnthropic( # type: ignore[call-arg, call-arg] - model="claude-3-opus-20240229", + model=MODEL_NAME, anthropic_api_key="secret-api-key", ) assert ( @@ -1277,7 +1269,7 @@ class GetWeather(BaseModel): def test_anthropic_bind_tools_tool_choice() -> None: chat_model = ChatAnthropic( # type: ignore[call-arg, call-arg] - model="claude-3-opus-20240229", + model=MODEL_NAME, anthropic_api_key="secret-api-key", ) chat_model_with_tools = chat_model.bind_tools( @@ -1307,7 +1299,7 @@ def test_anthropic_bind_tools_tool_choice() -> None: def test_optional_description() -> None: - llm = ChatAnthropic(model="claude-3-5-haiku-latest") + llm = ChatAnthropic(model=MODEL_NAME) class SampleModel(BaseModel): sample_field: str @@ -1317,7 +1309,7 @@ def test_optional_description() -> None: def test_get_num_tokens_from_messages_passes_kwargs() -> None: """Test that get_num_tokens_from_messages passes kwargs to the model.""" - llm = ChatAnthropic(model="claude-3-5-haiku-latest") + llm = ChatAnthropic(model=MODEL_NAME) with patch.object(anthropic, "Client") as _client: 
llm.get_num_tokens_from_messages([HumanMessage("foo")], foo="bar") @@ -1325,7 +1317,7 @@ def test_get_num_tokens_from_messages_passes_kwargs() -> None: assert _client.return_value.messages.count_tokens.call_args.kwargs["foo"] == "bar" llm = ChatAnthropic( - model="claude-sonnet-4-5-20250929", + model=MODEL_NAME, betas=["context-management-2025-06-27"], context_management={"edits": [{"type": "clear_tool_uses_20250919"}]}, ) @@ -1405,7 +1397,7 @@ def test_mcp_tracing() -> None: ] llm = ChatAnthropic( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", betas=["mcp-client-2025-04-04"], mcp_servers=mcp_servers, ) @@ -1440,7 +1432,7 @@ def test_mcp_tracing() -> None: def test_cache_control_kwarg() -> None: - llm = ChatAnthropic(model="claude-3-5-haiku-latest") + llm = ChatAnthropic(model=MODEL_NAME) messages = [HumanMessage("foo"), AIMessage("bar"), HumanMessage("baz")] payload = llm._get_request_payload(messages) @@ -1488,7 +1480,7 @@ def test_cache_control_kwarg() -> None: def test_context_management_in_payload() -> None: llm = ChatAnthropic( - model="claude-sonnet-4-5-20250929", # type: ignore[call-arg] + model=MODEL_NAME, # type: ignore[call-arg] betas=["context-management-2025-06-27"], context_management={"edits": [{"type": "clear_tool_uses_20250919"}]}, ) @@ -1503,19 +1495,19 @@ def test_context_management_in_payload() -> None: def test_anthropic_model_params() -> None: - llm = ChatAnthropic(model="claude-3-5-haiku-latest") + llm = ChatAnthropic(model=MODEL_NAME) ls_params = llm._get_ls_params() assert ls_params == { "ls_provider": "anthropic", "ls_model_type": "chat", - "ls_model_name": "claude-3-5-haiku-latest", - "ls_max_tokens": 8192, + "ls_model_name": MODEL_NAME, + "ls_max_tokens": 64000, "ls_temperature": None, } - ls_params = llm._get_ls_params(model="claude-opus-4-1-20250805") - assert ls_params.get("ls_model_name") == "claude-opus-4-1-20250805" + ls_params = llm._get_ls_params(model=MODEL_NAME) + assert ls_params.get("ls_model_name") == MODEL_NAME def test_streaming_cache_token_reporting() -> None: @@ -1528,7 +1520,7 @@ def test_streaming_cache_token_reporting() -> None: # Create a mock message_start event mock_message = MagicMock() - mock_message.model = "claude-3-sonnet-20240229" + mock_message.model = MODEL_NAME mock_message.usage.input_tokens = 100 mock_message.usage.output_tokens = 0 mock_message.usage.cache_read_input_tokens = 25 diff --git a/libs/partners/anthropic/tests/unit_tests/test_standard.py b/libs/partners/anthropic/tests/unit_tests/test_standard.py index d8cdafc159f..3b9071c5798 100644 --- a/libs/partners/anthropic/tests/unit_tests/test_standard.py +++ b/libs/partners/anthropic/tests/unit_tests/test_standard.py @@ -9,7 +9,7 @@ from langchain_anthropic import ChatAnthropic class TestAnthropicStandard(ChatModelUnitTests): - """Use the standard ChatModel unit tests against the ChatAnthropic class.""" + """Use the standard chat model unit tests against the `ChatAnthropic` class.""" @property def chat_model_class(self) -> type[BaseChatModel]: @@ -26,7 +26,7 @@ def test_init_time_with_client(benchmark: BenchmarkFixture) -> None: def _init_in_loop_with_clients() -> None: for _ in range(10): - llm = ChatAnthropic(model="claude-3-5-haiku-latest") + llm = ChatAnthropic(model="claude-3-5-haiku-20241022") _ = llm._client _ = llm._async_client diff --git a/libs/partners/anthropic/uv.lock b/libs/partners/anthropic/uv.lock index c8fee5e9780..68e52c48a01 100644 --- a/libs/partners/anthropic/uv.lock +++ b/libs/partners/anthropic/uv.lock @@ -21,7 +21,7 @@ 
wheels = [ [[package]] name = "anthropic" -version = "0.69.0" +version = "0.71.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -33,9 +33,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c8/9d/9ad1778b95f15c5b04e7d328c1b5f558f1e893857b7c33cd288c19c0057a/anthropic-0.69.0.tar.gz", hash = "sha256:c604d287f4d73640f40bd2c0f3265a2eb6ce034217ead0608f6b07a8bc5ae5f2", size = 480622, upload-time = "2025-09-29T16:53:45.282Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/4f/70682b068d897841f43223df82d96ec1d617435a8b759c4a2d901a50158b/anthropic-0.71.0.tar.gz", hash = "sha256:eb8e6fa86d049061b3ef26eb4cbae0174ebbff21affa6de7b3098da857d8de6a", size = 489102, upload-time = "2025-10-16T15:54:40.08Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/38/75129688de5637eb5b383e5f2b1570a5cc3aecafa4de422da8eea4b90a6c/anthropic-0.69.0-py3-none-any.whl", hash = "sha256:1f73193040f33f11e27c2cd6ec25f24fe7c3f193dc1c5cde6b7a08b18a16bcc5", size = 337265, upload-time = "2025-09-29T16:53:43.686Z" }, + { url = "https://files.pythonhosted.org/packages/5d/77/073e8ac488f335aec7001952825275582fb8f433737e90f24eeef9d878f6/anthropic-0.71.0-py3-none-any.whl", hash = "sha256:85c5015fcdbdc728390f11b17642a65a4365d03b12b799b18b6cc57e71fdb327", size = 355035, upload-time = "2025-10-16T15:54:38.238Z" }, ] [[package]] @@ -55,11 +55,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.8.3" +version = "2025.10.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] [[package]] @@ -146,66 +146,91 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.3" +version = "3.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = 
"2025-10-14T04:42:32.879Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, - { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, - { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, - { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, - { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, - { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, - { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, - { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, - { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, - { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, - { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, - { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, - { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, - { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, - { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, - { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, - { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, - { url = 
"https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, - { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, - { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, - { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, - { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, - { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, - { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, - { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, - { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, - { url = 
"https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, - { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, - { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, - { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, - { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, - { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, - { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, - { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, - { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, - { url = 
"https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, - { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, - { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, - { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, - { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, - { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, - { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, - { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, - { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, - { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, - { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, - { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, - { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, + { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, + { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, + { url = "https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 107198, upload-time = "2025-10-14T04:40:30.644Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, 
upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] [[package]] @@ -256,6 +281,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, ] +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" }, +] + [[package]] name = "freezegun" version = "1.5.5" @@ -307,11 +341,11 @@ wheels = [ [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", 
size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] @@ -325,75 +359,99 @@ wheels = [ [[package]] name = "jiter" -version = "0.11.0" +version = "0.11.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/68/0357982493a7b20925aece061f7fb7a2678e3b232f8d73a6edb7e5304443/jiter-0.11.1.tar.gz", hash = "sha256:849dcfc76481c0ea0099391235b7ca97d7279e0fa4c86005457ac7c88e8b76dc", size = 168385, upload-time = "2025-10-17T11:31:15.186Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/21/7dd1235a19e26979be6098e87e4cced2e061752f3a40a17bbce6dea7fae1/jiter-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3893ce831e1c0094a83eeaf56c635a167d6fa8cc14393cc14298fd6fdc2a2449", size = 309875, upload-time = "2025-09-15T09:18:48.41Z" }, - { url = "https://files.pythonhosted.org/packages/71/f9/462b54708aa85b135733ccba70529dd68a18511bf367a87c5fd28676c841/jiter-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:25c625b9b61b5a8725267fdf867ef2e51b429687f6a4eef211f4612e95607179", size = 316505, upload-time = "2025-09-15T09:18:51.057Z" }, - { url = "https://files.pythonhosted.org/packages/bd/40/14e2eeaac6a47bff27d213834795472355fd39769272eb53cb7aa83d5aa8/jiter-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd4ca85fb6a62cf72e1c7f5e34ddef1b660ce4ed0886ec94a1ef9777d35eaa1f", size = 337613, upload-time = "2025-09-15T09:18:52.358Z" }, - { url = "https://files.pythonhosted.org/packages/d3/ed/a5f1f8419c92b150a7c7fb5ccba1fb1e192887ad713d780e70874f0ce996/jiter-0.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:572208127034725e79c28437b82414028c3562335f2b4f451d98136d0fc5f9cd", size = 361438, upload-time = "2025-09-15T09:18:54.637Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f5/70682c023dfcdd463a53faf5d30205a7d99c51d70d3e303c932d0936e5a2/jiter-0.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494ba627c7f550ad3dabb21862864b8f2216098dc18ff62f37b37796f2f7c325", size = 486180, upload-time = "2025-09-15T09:18:56.158Z" }, - { url = "https://files.pythonhosted.org/packages/7c/39/020d08cbab4eab48142ad88b837c41eb08a15c0767fdb7c0d3265128a44b/jiter-0.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8da18a99f58bca3ecc2d2bba99cac000a924e115b6c4f0a2b98f752b6fbf39a", size = 376681, upload-time = "2025-09-15T09:18:57.553Z" }, - { url = "https://files.pythonhosted.org/packages/52/10/b86733f6e594cf51dd142f37c602d8df87c554c5844958deaab0de30eb5d/jiter-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ffd3b0fff3fabbb02cc09910c08144db6bb5697a98d227a074401e01ee63dd", size = 348685, upload-time = "2025-09-15T09:18:59.208Z" }, - { url = "https://files.pythonhosted.org/packages/fb/ee/8861665e83a9e703aa5f65fddddb6225428e163e6b0baa95a7f9a8fb9aae/jiter-0.11.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fe6530aa738a4f7d4e4702aa8f9581425d04036a5f9e25af65ebe1f708f23be", size = 385573, upload-time = "2025-09-15T09:19:00.593Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/74/05afec03600951f128293813b5a208c9ba1bf587c57a344c05a42a69e1b1/jiter-0.11.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e35d66681c133a03d7e974e7eedae89720fe8ca3bd09f01a4909b86a8adf31f5", size = 516669, upload-time = "2025-09-15T09:19:02.369Z" }, - { url = "https://files.pythonhosted.org/packages/93/d1/2e5bfe147cfbc2a5eef7f73eb75dc5c6669da4fa10fc7937181d93af9495/jiter-0.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c59459beca2fbc9718b6f1acb7bfb59ebc3eb4294fa4d40e9cb679dafdcc6c60", size = 508767, upload-time = "2025-09-15T09:19:04.011Z" }, - { url = "https://files.pythonhosted.org/packages/87/50/597f71307e10426b5c082fd05d38c615ddbdd08c3348d8502963307f0652/jiter-0.11.0-cp310-cp310-win32.whl", hash = "sha256:b7b0178417b0dcfc5f259edbc6db2b1f5896093ed9035ee7bab0f2be8854726d", size = 205476, upload-time = "2025-09-15T09:19:05.594Z" }, - { url = "https://files.pythonhosted.org/packages/c7/86/1e5214b3272e311754da26e63edec93a183811d4fc2e0118addec365df8b/jiter-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:11df2bf99fb4754abddd7f5d940a48e51f9d11624d6313ca4314145fcad347f0", size = 204708, upload-time = "2025-09-15T09:19:06.955Z" }, - { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, - { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, - { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, - { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, - { url = "https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, - { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, - { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, - { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, - { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, - { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, - { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, - { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, - { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, - { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, - { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, - { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, - { url = 
"https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, - { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, - { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, - { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, - { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, - { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, - { url = "https://files.pythonhosted.org/packages/97/c4/d530e514d0f4f29b2b68145e7b389cbc7cac7f9c8c23df43b04d3d10fa3e/jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb", size = 305021, upload-time = "2025-09-15T09:19:43.523Z" }, - { url = "https://files.pythonhosted.org/packages/7a/77/796a19c567c5734cbfc736a6f987affc0d5f240af8e12063c0fb93990ffa/jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471", size = 314384, upload-time = "2025-09-15T09:19:44.849Z" }, - { url = "https://files.pythonhosted.org/packages/14/9c/824334de0b037b91b6f3fa9fe5a191c83977c7ec4abe17795d3cb6d174cf/jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd", size = 337389, upload-time = "2025-09-15T09:19:46.094Z" }, - { url = "https://files.pythonhosted.org/packages/a2/95/ed4feab69e6cf9b2176ea29d4ef9d01a01db210a3a2c8a31a44ecdc68c38/jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921", size = 360519, upload-time = "2025-09-15T09:19:47.494Z" }, - { url = "https://files.pythonhosted.org/packages/b5/0c/2ad00f38d3e583caba3909d95b7da1c3a7cd82c0aa81ff4317a8016fb581/jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df", size = 487198, upload-time = "2025-09-15T09:19:49.116Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/8b/919b64cf3499b79bdfba6036da7b0cac5d62d5c75a28fb45bad7819e22f0/jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982", size = 377835, upload-time = "2025-09-15T09:19:50.468Z" }, - { url = "https://files.pythonhosted.org/packages/29/7f/8ebe15b6e0a8026b0d286c083b553779b4dd63db35b43a3f171b544de91d/jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64", size = 347655, upload-time = "2025-09-15T09:19:51.726Z" }, - { url = "https://files.pythonhosted.org/packages/8e/64/332127cef7e94ac75719dda07b9a472af6158ba819088d87f17f3226a769/jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1", size = 386135, upload-time = "2025-09-15T09:19:53.075Z" }, - { url = "https://files.pythonhosted.org/packages/20/c8/557b63527442f84c14774159948262a9d4fabb0d61166f11568f22fc60d2/jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758", size = 516063, upload-time = "2025-09-15T09:19:54.447Z" }, - { url = "https://files.pythonhosted.org/packages/86/13/4164c819df4a43cdc8047f9a42880f0ceef5afeb22e8b9675c0528ebdccd/jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166", size = 508139, upload-time = "2025-09-15T09:19:55.764Z" }, - { url = "https://files.pythonhosted.org/packages/fa/70/6e06929b401b331d41ddb4afb9f91cd1168218e3371972f0afa51c9f3c31/jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80", size = 206369, upload-time = "2025-09-15T09:19:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/f4/0d/8185b8e15de6dce24f6afae63380e16377dd75686d56007baa4f29723ea1/jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6", size = 202538, upload-time = "2025-09-15T09:19:58.35Z" }, - { url = "https://files.pythonhosted.org/packages/13/3a/d61707803260d59520721fa326babfae25e9573a88d8b7b9cb54c5423a59/jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33", size = 313737, upload-time = "2025-09-15T09:19:59.638Z" }, - { url = "https://files.pythonhosted.org/packages/cd/cc/c9f0eec5d00f2a1da89f6bdfac12b8afdf8d5ad974184863c75060026457/jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03", size = 346183, upload-time = "2025-09-15T09:20:01.442Z" }, - { url = "https://files.pythonhosted.org/packages/a6/87/fc632776344e7aabbab05a95a0075476f418c5d29ab0f2eec672b7a1f0ac/jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba", size = 204225, upload-time = "2025-09-15T09:20:03.102Z" }, - { url = "https://files.pythonhosted.org/packages/ee/3b/e7f45be7d3969bdf2e3cd4b816a7a1d272507cd0edd2d6dc4b07514f2d9a/jiter-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9a6dff27eca70930bdbe4cbb7c1a4ba8526e13b63dc808c0670083d2d51a4a72", size = 304414, upload-time = "2025-09-15T09:20:04.357Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/32/13e8e0d152631fcc1907ceb4943711471be70496d14888ec6e92034e2caf/jiter-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b1ae2a7593a62132c7d4c2abbee80bbbb94fdc6d157e2c6cc966250c564ef774", size = 314223, upload-time = "2025-09-15T09:20:05.631Z" }, - { url = "https://files.pythonhosted.org/packages/0c/7e/abedd5b5a20ca083f778d96bba0d2366567fcecb0e6e34ff42640d5d7a18/jiter-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b13a431dba4b059e9e43019d3022346d009baf5066c24dcdea321a303cde9f0", size = 337306, upload-time = "2025-09-15T09:20:06.917Z" }, - { url = "https://files.pythonhosted.org/packages/ac/e2/30d59bdc1204c86aa975ec72c48c482fee6633120ee9c3ab755e4dfefea8/jiter-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:af62e84ca3889604ebb645df3b0a3f3bcf6b92babbff642bd214616f57abb93a", size = 360565, upload-time = "2025-09-15T09:20:08.283Z" }, - { url = "https://files.pythonhosted.org/packages/fe/88/567288e0d2ed9fa8f7a3b425fdaf2cb82b998633c24fe0d98f5417321aa8/jiter-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f3b32bb723246e6b351aecace52aba78adb8eeb4b2391630322dc30ff6c773", size = 486465, upload-time = "2025-09-15T09:20:09.613Z" }, - { url = "https://files.pythonhosted.org/packages/18/6e/7b72d09273214cadd15970e91dd5ed9634bee605176107db21e1e4205eb1/jiter-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:adcab442f4a099a358a7f562eaa54ed6456fb866e922c6545a717be51dbed7d7", size = 377581, upload-time = "2025-09-15T09:20:10.884Z" }, - { url = "https://files.pythonhosted.org/packages/58/52/4db456319f9d14deed325f70102577492e9d7e87cf7097bda9769a1fcacb/jiter-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9967c2ab338ee2b2c0102fd379ec2693c496abf71ffd47e4d791d1f593b68e2", size = 347102, upload-time = "2025-09-15T09:20:12.175Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b4/433d5703c38b26083aec7a733eb5be96f9c6085d0e270a87ca6482cbf049/jiter-0.11.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e7d0bed3b187af8b47a981d9742ddfc1d9b252a7235471ad6078e7e4e5fe75c2", size = 386477, upload-time = "2025-09-15T09:20:13.428Z" }, - { url = "https://files.pythonhosted.org/packages/c8/7a/a60bfd9c55b55b07c5c441c5085f06420b6d493ce9db28d069cc5b45d9f3/jiter-0.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:f6fe0283e903ebc55f1a6cc569b8c1f3bf4abd026fed85e3ff8598a9e6f982f0", size = 516004, upload-time = "2025-09-15T09:20:14.848Z" }, - { url = "https://files.pythonhosted.org/packages/2e/46/f8363e5ecc179b4ed0ca6cb0a6d3bfc266078578c71ff30642ea2ce2f203/jiter-0.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5821e3d66606b29ae5b497230b304f1376f38137d69e35f8d2bd5f310ff73", size = 507855, upload-time = "2025-09-15T09:20:16.176Z" }, - { url = "https://files.pythonhosted.org/packages/90/33/396083357d51d7ff0f9805852c288af47480d30dd31d8abc74909b020761/jiter-0.11.0-cp314-cp314-win32.whl", hash = "sha256:c2d13ba7567ca8799f17c76ed56b1d49be30df996eb7fa33e46b62800562a5e2", size = 205802, upload-time = "2025-09-15T09:20:17.661Z" }, - { url = "https://files.pythonhosted.org/packages/e7/ab/eb06ca556b2551d41de7d03bf2ee24285fa3d0c58c5f8d95c64c9c3281b1/jiter-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fb4790497369d134a07fc763cc88888c46f734abdd66f9fdf7865038bf3a8f40", size = 313405, upload-time = "2025-09-15T09:20:18.918Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/22/7ab7b4ec3a1c1f03aef376af11d23b05abcca3fb31fbca1e7557053b1ba2/jiter-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2bbf24f16ba5ad4441a9845e40e4ea0cb9eed00e76ba94050664ef53ef4406", size = 347102, upload-time = "2025-09-15T09:20:20.16Z" }, - { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, + { url = "https://files.pythonhosted.org/packages/12/10/d099def5716452c8d5ffa527405373a44ddaf8e3c9d4f6de1e1344cffd90/jiter-0.11.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ed58841a491bbbf3f7c55a6b68fff568439ab73b2cce27ace0e169057b5851df", size = 310078, upload-time = "2025-10-17T11:28:36.186Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/b81d010b0031ffa96dfb590628562ac5f513ce56aa2ab451d29fb3fedeb9/jiter-0.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:499beb9b2d7e51d61095a8de39ebcab1d1778f2a74085f8305a969f6cee9f3e4", size = 317138, upload-time = "2025-10-17T11:28:38.294Z" }, + { url = "https://files.pythonhosted.org/packages/89/12/31ea12af9d79671cc7bd893bf0ccaf3467624c0fc7146a0cbfe7b549bcfa/jiter-0.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b87b2821795e28cc990939b68ce7a038edea680a24910bd68a79d54ff3f03c02", size = 348964, upload-time = "2025-10-17T11:28:40.103Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/95cb6dc5ff962410667a29708c7a6c0691cc3c4866a0bfa79d085b56ebd6/jiter-0.11.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83f6fa494d8bba14ab100417c80e70d32d737e805cb85be2052d771c76fcd1f8", size = 363289, upload-time = "2025-10-17T11:28:41.49Z" }, + { url = "https://files.pythonhosted.org/packages/b8/3e/37006ad5843a0bc3a3ec3a6c44710d7a154113befaf5f26d2fe190668b63/jiter-0.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fbc6aea1daa2ec6f5ed465f0c5e7b0607175062ceebbea5ca70dd5ddab58083", size = 487243, upload-time = "2025-10-17T11:28:43.209Z" }, + { url = "https://files.pythonhosted.org/packages/80/5c/d38c8c801a322a0c0de47b9618c16fd766366f087ce37c4e55ae8e3c8b03/jiter-0.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:302288e2edc43174bb2db838e94688d724f9aad26c5fb9a74f7a5fb427452a6a", size = 376139, upload-time = "2025-10-17T11:28:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/b0/cd/442ad2389a5570b0ee673f93e14bbe8cdecd3e08a9ba7756081d84065e4c/jiter-0.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85db563fe3b367bb568af5d29dea4d4066d923b8e01f3417d25ebecd958de815", size = 359279, upload-time = "2025-10-17T11:28:46.152Z" }, + { url = "https://files.pythonhosted.org/packages/9a/35/8f5810d0e7d00bc395889085dbc1ccc36d454b56f28b2a5359dfd1bab48d/jiter-0.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1c1ba2b6b22f775444ef53bc2d5778396d3520abc7b2e1da8eb0c27cb3ffb10", size = 384911, upload-time = "2025-10-17T11:28:48.03Z" }, + { url = "https://files.pythonhosted.org/packages/3c/bd/8c069ceb0bafcf6b4aa5de0c27f02faf50468df39564a02e1a12389ad6c2/jiter-0.11.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:523be464b14f8fd0cc78da6964b87b5515a056427a2579f9085ce30197a1b54a", size = 517879, 
upload-time = "2025-10-17T11:28:49.902Z" }, + { url = "https://files.pythonhosted.org/packages/bc/3c/9163efcf762f79f47433078b4f0a1bddc56096082c02c6cae2f47f07f56f/jiter-0.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25b99b3f04cd2a38fefb22e822e35eb203a2cd37d680dbbc0c0ba966918af336", size = 508739, upload-time = "2025-10-17T11:28:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/44/07/50690f257935845d3114b95b5dd03749eeaab5e395cbb522f9e957da4551/jiter-0.11.1-cp310-cp310-win32.whl", hash = "sha256:47a79e90545a596bb9104109777894033347b11180d4751a216afef14072dbe7", size = 203948, upload-time = "2025-10-17T11:28:54.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3a/5964a944bf2e98ffd566153fdc2a6a368fcb11b58cc46832ca8c75808dba/jiter-0.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:cace75621ae9bd66878bf69fbd4dfc1a28ef8661e0c2d0eb72d3d6f1268eddf5", size = 207522, upload-time = "2025-10-17T11:28:56.79Z" }, + { url = "https://files.pythonhosted.org/packages/8b/34/c9e6cfe876f9a24f43ed53fe29f052ce02bd8d5f5a387dbf46ad3764bef0/jiter-0.11.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b0088ff3c374ce8ce0168523ec8e97122ebb788f950cf7bb8e39c7dc6a876a2", size = 310160, upload-time = "2025-10-17T11:28:59.174Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/b06ec8181d7165858faf2ac5287c54fe52b2287760b7fe1ba9c06890255f/jiter-0.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74433962dd3c3090655e02e461267095d6c84f0741c7827de11022ef8d7ff661", size = 316573, upload-time = "2025-10-17T11:29:00.905Z" }, + { url = "https://files.pythonhosted.org/packages/66/49/3179d93090f2ed0c6b091a9c210f266d2d020d82c96f753260af536371d0/jiter-0.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d98030e345e6546df2cc2c08309c502466c66c4747b043f1a0d415fada862b8", size = 348998, upload-time = "2025-10-17T11:29:02.321Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9d/63db2c8eabda7a9cad65a2e808ca34aaa8689d98d498f5a2357d7a2e2cec/jiter-0.11.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d6db0b2e788db46bec2cf729a88b6dd36959af2abd9fa2312dfba5acdd96dcb", size = 363413, upload-time = "2025-10-17T11:29:03.787Z" }, + { url = "https://files.pythonhosted.org/packages/25/ff/3e6b3170c5053053c7baddb8d44e2bf11ff44cd71024a280a8438ae6ba32/jiter-0.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55678fbbda261eafe7289165dd2ddd0e922df5f9a1ae46d7c79a5a15242bd7d1", size = 487144, upload-time = "2025-10-17T11:29:05.37Z" }, + { url = "https://files.pythonhosted.org/packages/b0/50/b63fcadf699893269b997f4c2e88400bc68f085c6db698c6e5e69d63b2c1/jiter-0.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a6b74fae8e40497653b52ce6ca0f1b13457af769af6fb9c1113efc8b5b4d9be", size = 376215, upload-time = "2025-10-17T11:29:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/39/8c/57a8a89401134167e87e73471b9cca321cf651c1fd78c45f3a0f16932213/jiter-0.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a55a453f8b035eb4f7852a79a065d616b7971a17f5e37a9296b4b38d3b619e4", size = 359163, upload-time = "2025-10-17T11:29:09.047Z" }, + { url = "https://files.pythonhosted.org/packages/4b/96/30b0cdbffbb6f753e25339d3dbbe26890c9ef119928314578201c758aace/jiter-0.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2638148099022e6bdb3f42904289cd2e403609356fb06eb36ddec2d50958bc29", size = 385344, upload-time = "2025-10-17T11:29:10.69Z" 
}, + { url = "https://files.pythonhosted.org/packages/c6/d5/31dae27c1cc9410ad52bb514f11bfa4f286f7d6ef9d287b98b8831e156ec/jiter-0.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:252490567a5d990986f83b95a5f1ca1bf205ebd27b3e9e93bb7c2592380e29b9", size = 517972, upload-time = "2025-10-17T11:29:12.174Z" }, + { url = "https://files.pythonhosted.org/packages/61/1e/5905a7a3aceab80de13ab226fd690471a5e1ee7e554dc1015e55f1a6b896/jiter-0.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d431d52b0ca2436eea6195f0f48528202100c7deda354cb7aac0a302167594d5", size = 508408, upload-time = "2025-10-17T11:29:13.597Z" }, + { url = "https://files.pythonhosted.org/packages/91/12/1c49b97aa49077e136e8591cef7162f0d3e2860ae457a2d35868fd1521ef/jiter-0.11.1-cp311-cp311-win32.whl", hash = "sha256:db6f41e40f8bae20c86cb574b48c4fd9f28ee1c71cb044e9ec12e78ab757ba3a", size = 203937, upload-time = "2025-10-17T11:29:14.894Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9d/2255f7c17134ee9892c7e013c32d5bcf4bce64eb115402c9fe5e727a67eb/jiter-0.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0cc407b8e6cdff01b06bb80f61225c8b090c3df108ebade5e0c3c10993735b19", size = 207589, upload-time = "2025-10-17T11:29:16.166Z" }, + { url = "https://files.pythonhosted.org/packages/3c/28/6307fc8f95afef84cae6caf5429fee58ef16a582c2ff4db317ceb3e352fa/jiter-0.11.1-cp311-cp311-win_arm64.whl", hash = "sha256:fe04ea475392a91896d1936367854d346724a1045a247e5d1c196410473b8869", size = 188391, upload-time = "2025-10-17T11:29:17.488Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/318e8af2c904a9d29af91f78c1e18f0592e189bbdb8a462902d31fe20682/jiter-0.11.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c92148eec91052538ce6823dfca9525f5cfc8b622d7f07e9891a280f61b8c96c", size = 305655, upload-time = "2025-10-17T11:29:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/f7/29/6c7de6b5d6e511d9e736312c0c9bfcee8f9b6bef68182a08b1d78767e627/jiter-0.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ecd4da91b5415f183a6be8f7158d127bdd9e6a3174138293c0d48d6ea2f2009d", size = 315645, upload-time = "2025-10-17T11:29:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5f/ef9e5675511ee0eb7f98dd8c90509e1f7743dbb7c350071acae87b0145f3/jiter-0.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7e3ac25c00b9275684d47aa42febaa90a9958e19fd1726c4ecf755fbe5e553b", size = 348003, upload-time = "2025-10-17T11:29:22.712Z" }, + { url = "https://files.pythonhosted.org/packages/56/1b/abe8c4021010b0a320d3c62682769b700fb66f92c6db02d1a1381b3db025/jiter-0.11.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d7305c0a841858f866cd459cd9303f73883fb5e097257f3d4a3920722c69d4", size = 365122, upload-time = "2025-10-17T11:29:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/2a/2d/4a18013939a4f24432f805fbd5a19893e64650b933edb057cd405275a538/jiter-0.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e86fa10e117dce22c547f31dd6d2a9a222707d54853d8de4e9a2279d2c97f239", size = 488360, upload-time = "2025-10-17T11:29:25.724Z" }, + { url = "https://files.pythonhosted.org/packages/f0/77/38124f5d02ac4131f0dfbcfd1a19a0fac305fa2c005bc4f9f0736914a1a4/jiter-0.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae5ef1d48aec7e01ee8420155d901bb1d192998fa811a65ebb82c043ee186711", size = 376884, upload-time = "2025-10-17T11:29:27.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/43/59fdc2f6267959b71dd23ce0bd8d4aeaf55566aa435a5d00f53d53c7eb24/jiter-0.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb68e7bf65c990531ad8715e57d50195daf7c8e6f1509e617b4e692af1108939", size = 358827, upload-time = "2025-10-17T11:29:28.698Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/b3cc20ff5340775ea3bbaa0d665518eddecd4266ba7244c9cb480c0c82ec/jiter-0.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43b30c8154ded5845fa454ef954ee67bfccce629b2dea7d01f795b42bc2bda54", size = 385171, upload-time = "2025-10-17T11:29:30.078Z" }, + { url = "https://files.pythonhosted.org/packages/d2/bc/94dd1f3a61f4dc236f787a097360ec061ceeebebf4ea120b924d91391b10/jiter-0.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:586cafbd9dd1f3ce6a22b4a085eaa6be578e47ba9b18e198d4333e598a91db2d", size = 518359, upload-time = "2025-10-17T11:29:31.464Z" }, + { url = "https://files.pythonhosted.org/packages/7e/8c/12ee132bd67e25c75f542c227f5762491b9a316b0dad8e929c95076f773c/jiter-0.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:677cc2517d437a83bb30019fd4cf7cad74b465914c56ecac3440d597ac135250", size = 509205, upload-time = "2025-10-17T11:29:32.895Z" }, + { url = "https://files.pythonhosted.org/packages/39/d5/9de848928ce341d463c7e7273fce90ea6d0ea4343cd761f451860fa16b59/jiter-0.11.1-cp312-cp312-win32.whl", hash = "sha256:fa992af648fcee2b850a3286a35f62bbbaeddbb6dbda19a00d8fbc846a947b6e", size = 205448, upload-time = "2025-10-17T11:29:34.217Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/8002d78637e05009f5e3fb5288f9d57d65715c33b5d6aa20fd57670feef5/jiter-0.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:88b5cae9fa51efeb3d4bd4e52bfd4c85ccc9cac44282e2a9640893a042ba4d87", size = 204285, upload-time = "2025-10-17T11:29:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a2/bb24d5587e4dff17ff796716542f663deee337358006a80c8af43ddc11e5/jiter-0.11.1-cp312-cp312-win_arm64.whl", hash = "sha256:9a6cae1ab335551917f882f2c3c1efe7617b71b4c02381e4382a8fc80a02588c", size = 188712, upload-time = "2025-10-17T11:29:37.027Z" }, + { url = "https://files.pythonhosted.org/packages/7c/4b/e4dd3c76424fad02a601d570f4f2a8438daea47ba081201a721a903d3f4c/jiter-0.11.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:71b6a920a5550f057d49d0e8bcc60945a8da998019e83f01adf110e226267663", size = 305272, upload-time = "2025-10-17T11:29:39.249Z" }, + { url = "https://files.pythonhosted.org/packages/67/83/2cd3ad5364191130f4de80eacc907f693723beaab11a46c7d155b07a092c/jiter-0.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b3de72e925388453a5171be83379549300db01284f04d2a6f244d1d8de36f94", size = 314038, upload-time = "2025-10-17T11:29:40.563Z" }, + { url = "https://files.pythonhosted.org/packages/d3/3c/8e67d9ba524e97d2f04c8f406f8769a23205026b13b0938d16646d6e2d3e/jiter-0.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc19dd65a2bd3d9c044c5b4ebf657ca1e6003a97c0fc10f555aa4f7fb9821c00", size = 345977, upload-time = "2025-10-17T11:29:42.009Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a5/489ce64d992c29bccbffabb13961bbb0435e890d7f2d266d1f3df5e917d2/jiter-0.11.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d58faaa936743cd1464540562f60b7ce4fd927e695e8bc31b3da5b914baa9abd", size = 364503, upload-time = "2025-10-17T11:29:43.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/c0/e321dd83ee231d05c8fe4b1a12caf1f0e8c7a949bf4724d58397104f10f2/jiter-0.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:902640c3103625317291cb73773413b4d71847cdf9383ba65528745ff89f1d14", size = 487092, upload-time = "2025-10-17T11:29:44.835Z" }, + { url = "https://files.pythonhosted.org/packages/f9/5e/8f24ec49c8d37bd37f34ec0112e0b1a3b4b5a7b456c8efff1df5e189ad43/jiter-0.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30405f726e4c2ed487b176c09f8b877a957f535d60c1bf194abb8dadedb5836f", size = 376328, upload-time = "2025-10-17T11:29:46.175Z" }, + { url = "https://files.pythonhosted.org/packages/7f/70/ded107620e809327cf7050727e17ccfa79d6385a771b7fe38fb31318ef00/jiter-0.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3217f61728b0baadd2551844870f65219ac4a1285d5e1a4abddff3d51fdabe96", size = 356632, upload-time = "2025-10-17T11:29:47.454Z" }, + { url = "https://files.pythonhosted.org/packages/19/53/c26f7251613f6a9079275ee43c89b8a973a95ff27532c421abc2a87afb04/jiter-0.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1364cc90c03a8196f35f396f84029f12abe925415049204446db86598c8b72c", size = 384358, upload-time = "2025-10-17T11:29:49.377Z" }, + { url = "https://files.pythonhosted.org/packages/84/16/e0f2cc61e9c4d0b62f6c1bd9b9781d878a427656f88293e2a5335fa8ff07/jiter-0.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:53a54bf8e873820ab186b2dca9f6c3303f00d65ae5e7b7d6bda1b95aa472d646", size = 517279, upload-time = "2025-10-17T11:29:50.968Z" }, + { url = "https://files.pythonhosted.org/packages/60/5c/4cd095eaee68961bca3081acbe7c89e12ae24a5dae5fd5d2a13e01ed2542/jiter-0.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7e29aca023627b0e0c2392d4248f6414d566ff3974fa08ff2ac8dbb96dfee92a", size = 508276, upload-time = "2025-10-17T11:29:52.619Z" }, + { url = "https://files.pythonhosted.org/packages/4f/25/f459240e69b0e09a7706d96ce203ad615ca36b0fe832308d2b7123abf2d0/jiter-0.11.1-cp313-cp313-win32.whl", hash = "sha256:f153e31d8bca11363751e875c0a70b3d25160ecbaee7b51e457f14498fb39d8b", size = 205593, upload-time = "2025-10-17T11:29:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/461bafe22bae79bab74e217a09c907481a46d520c36b7b9fe71ee8c9e983/jiter-0.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:f773f84080b667c69c4ea0403fc67bb08b07e2b7ce1ef335dea5868451e60fed", size = 203518, upload-time = "2025-10-17T11:29:55.216Z" }, + { url = "https://files.pythonhosted.org/packages/7b/72/c45de6e320edb4fa165b7b1a414193b3cae302dd82da2169d315dcc78b44/jiter-0.11.1-cp313-cp313-win_arm64.whl", hash = "sha256:635ecd45c04e4c340d2187bcb1cea204c7cc9d32c1364d251564bf42e0e39c2d", size = 188062, upload-time = "2025-10-17T11:29:56.631Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/4a57922437ca8753ef823f434c2dec5028b237d84fa320f06a3ba1aec6e8/jiter-0.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d892b184da4d94d94ddb4031296931c74ec8b325513a541ebfd6dfb9ae89904b", size = 313814, upload-time = "2025-10-17T11:29:58.509Z" }, + { url = "https://files.pythonhosted.org/packages/76/50/62a0683dadca25490a4bedc6a88d59de9af2a3406dd5a576009a73a1d392/jiter-0.11.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa22c223a3041dacb2fcd37c70dfd648b44662b4a48e242592f95bda5ab09d58", size = 344987, upload-time = "2025-10-17T11:30:00.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/00/2355dbfcbf6cdeaddfdca18287f0f38ae49446bb6378e4a5971e9356fc8a/jiter-0.11.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:330e8e6a11ad4980cd66a0f4a3e0e2e0f646c911ce047014f984841924729789", size = 356399, upload-time = "2025-10-17T11:30:02.084Z" }, + { url = "https://files.pythonhosted.org/packages/c9/07/c2bd748d578fa933d894a55bff33f983bc27f75fc4e491b354bef7b78012/jiter-0.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:09e2e386ebf298547ca3a3704b729471f7ec666c2906c5c26c1a915ea24741ec", size = 203289, upload-time = "2025-10-17T11:30:03.656Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ee/ace64a853a1acbd318eb0ca167bad1cf5ee037207504b83a868a5849747b/jiter-0.11.1-cp313-cp313t-win_arm64.whl", hash = "sha256:fe4a431c291157e11cee7c34627990ea75e8d153894365a3bc84b7a959d23ca8", size = 188284, upload-time = "2025-10-17T11:30:05.046Z" }, + { url = "https://files.pythonhosted.org/packages/8d/00/d6006d069e7b076e4c66af90656b63da9481954f290d5eca8c715f4bf125/jiter-0.11.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0fa1f70da7a8a9713ff8e5f75ec3f90c0c870be6d526aa95e7c906f6a1c8c676", size = 304624, upload-time = "2025-10-17T11:30:06.678Z" }, + { url = "https://files.pythonhosted.org/packages/fc/45/4a0e31eb996b9ccfddbae4d3017b46f358a599ccf2e19fbffa5e531bd304/jiter-0.11.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:569ee559e5046a42feb6828c55307cf20fe43308e3ae0d8e9e4f8d8634d99944", size = 315042, upload-time = "2025-10-17T11:30:08.87Z" }, + { url = "https://files.pythonhosted.org/packages/e7/91/22f5746f5159a28c76acdc0778801f3c1181799aab196dbea2d29e064968/jiter-0.11.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f69955fa1d92e81987f092b233f0be49d4c937da107b7f7dcf56306f1d3fcce9", size = 346357, upload-time = "2025-10-17T11:30:10.222Z" }, + { url = "https://files.pythonhosted.org/packages/f5/4f/57620857d4e1dc75c8ff4856c90cb6c135e61bff9b4ebfb5dc86814e82d7/jiter-0.11.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:090f4c9d4a825e0fcbd0a2647c9a88a0f366b75654d982d95a9590745ff0c48d", size = 365057, upload-time = "2025-10-17T11:30:11.585Z" }, + { url = "https://files.pythonhosted.org/packages/ce/34/caf7f9cc8ae0a5bb25a5440cc76c7452d264d1b36701b90fdadd28fe08ec/jiter-0.11.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf3d8cedf9e9d825233e0dcac28ff15c47b7c5512fdfe2e25fd5bbb6e6b0cee", size = 487086, upload-time = "2025-10-17T11:30:13.052Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/85b5857c329d533d433fedf98804ebec696004a1f88cabad202b2ddc55cf/jiter-0.11.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2aa9b1958f9c30d3d1a558b75f0626733c60eb9b7774a86b34d88060be1e67fe", size = 376083, upload-time = "2025-10-17T11:30:14.416Z" }, + { url = "https://files.pythonhosted.org/packages/85/d3/2d9f973f828226e6faebdef034097a2918077ea776fb4d88489949024787/jiter-0.11.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42d1ca16590b768c5e7d723055acd2633908baacb3628dd430842e2e035aa90", size = 357825, upload-time = "2025-10-17T11:30:15.765Z" }, + { url = "https://files.pythonhosted.org/packages/f4/55/848d4dabf2c2c236a05468c315c2cb9dc736c5915e65449ccecdba22fb6f/jiter-0.11.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5db4c2486a023820b701a17aec9c5a6173c5ba4393f26662f032f2de9c848b0f", size = 383933, upload-time = "2025-10-17T11:30:17.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/6c/204c95a4fbb0e26dfa7776c8ef4a878d0c0b215868011cc904bf44f707e2/jiter-0.11.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4573b78777ccfac954859a6eff45cbd9d281d80c8af049d0f1a3d9fc323d5c3a", size = 517118, upload-time = "2025-10-17T11:30:18.684Z" }, + { url = "https://files.pythonhosted.org/packages/88/25/09956644ea5a2b1e7a2a0f665cb69a973b28f4621fa61fc0c0f06ff40a31/jiter-0.11.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7593ac6f40831d7961cb67633c39b9fef6689a211d7919e958f45710504f52d3", size = 508194, upload-time = "2025-10-17T11:30:20.719Z" }, + { url = "https://files.pythonhosted.org/packages/09/49/4d1657355d7f5c9e783083a03a3f07d5858efa6916a7d9634d07db1c23bd/jiter-0.11.1-cp314-cp314-win32.whl", hash = "sha256:87202ec6ff9626ff5f9351507def98fcf0df60e9a146308e8ab221432228f4ea", size = 203961, upload-time = "2025-10-17T11:30:22.073Z" }, + { url = "https://files.pythonhosted.org/packages/76/bd/f063bd5cc2712e7ca3cf6beda50894418fc0cfeb3f6ff45a12d87af25996/jiter-0.11.1-cp314-cp314-win_amd64.whl", hash = "sha256:a5dd268f6531a182c89d0dd9a3f8848e86e92dfff4201b77a18e6b98aa59798c", size = 202804, upload-time = "2025-10-17T11:30:23.452Z" }, + { url = "https://files.pythonhosted.org/packages/52/ca/4d84193dfafef1020bf0bedd5e1a8d0e89cb67c54b8519040effc694964b/jiter-0.11.1-cp314-cp314-win_arm64.whl", hash = "sha256:5d761f863f912a44748a21b5c4979c04252588ded8d1d2760976d2e42cd8d991", size = 188001, upload-time = "2025-10-17T11:30:24.915Z" }, + { url = "https://files.pythonhosted.org/packages/d5/fa/3b05e5c9d32efc770a8510eeb0b071c42ae93a5b576fd91cee9af91689a1/jiter-0.11.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2cc5a3965285ddc33e0cab933e96b640bc9ba5940cea27ebbbf6695e72d6511c", size = 312561, upload-time = "2025-10-17T11:30:26.742Z" }, + { url = "https://files.pythonhosted.org/packages/50/d3/335822eb216154ddb79a130cbdce88fdf5c3e2b43dc5dba1fd95c485aaf5/jiter-0.11.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b572b3636a784c2768b2342f36a23078c8d3aa6d8a30745398b1bab58a6f1a8", size = 344551, upload-time = "2025-10-17T11:30:28.252Z" }, + { url = "https://files.pythonhosted.org/packages/31/6d/a0bed13676b1398f9b3ba61f32569f20a3ff270291161100956a577b2dd3/jiter-0.11.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad93e3d67a981f96596d65d2298fe8d1aa649deb5374a2fb6a434410ee11915e", size = 363051, upload-time = "2025-10-17T11:30:30.009Z" }, + { url = "https://files.pythonhosted.org/packages/a4/03/313eda04aa08545a5a04ed5876e52f49ab76a4d98e54578896ca3e16313e/jiter-0.11.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a83097ce379e202dcc3fe3fc71a16d523d1ee9192c8e4e854158f96b3efe3f2f", size = 485897, upload-time = "2025-10-17T11:30:31.429Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/a1011b9d325e40b53b1b96a17c010b8646013417f3902f97a86325b19299/jiter-0.11.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7042c51e7fbeca65631eb0c332f90c0c082eab04334e7ccc28a8588e8e2804d9", size = 375224, upload-time = "2025-10-17T11:30:33.18Z" }, + { url = "https://files.pythonhosted.org/packages/92/da/1b45026b19dd39b419e917165ff0ea629dbb95f374a3a13d2df95e40a6ac/jiter-0.11.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a68d679c0e47649a61df591660507608adc2652442de7ec8276538ac46abe08", size = 356606, upload-time = "2025-10-17T11:30:34.572Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/0c/9acb0e54d6a8ba59ce923a180ebe824b4e00e80e56cefde86cc8e0a948be/jiter-0.11.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1b0da75dbf4b6ec0b3c9e604d1ee8beaf15bc046fff7180f7d89e3cdbd3bb51", size = 384003, upload-time = "2025-10-17T11:30:35.987Z" }, + { url = "https://files.pythonhosted.org/packages/3f/2b/e5a5fe09d6da2145e4eed651e2ce37f3c0cf8016e48b1d302e21fb1628b7/jiter-0.11.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:69dd514bf0fa31c62147d6002e5ca2b3e7ef5894f5ac6f0a19752385f4e89437", size = 516946, upload-time = "2025-10-17T11:30:37.425Z" }, + { url = "https://files.pythonhosted.org/packages/5f/fe/db936e16e0228d48eb81f9934e8327e9fde5185e84f02174fcd22a01be87/jiter-0.11.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:bb31ac0b339efa24c0ca606febd8b77ef11c58d09af1b5f2be4c99e907b11111", size = 507614, upload-time = "2025-10-17T11:30:38.977Z" }, + { url = "https://files.pythonhosted.org/packages/86/db/c4438e8febfb303486d13c6b72f5eb71cf851e300a0c1f0b4140018dd31f/jiter-0.11.1-cp314-cp314t-win32.whl", hash = "sha256:b2ce0d6156a1d3ad41da3eec63b17e03e296b78b0e0da660876fccfada86d2f7", size = 204043, upload-time = "2025-10-17T11:30:40.308Z" }, + { url = "https://files.pythonhosted.org/packages/36/59/81badb169212f30f47f817dfaabf965bc9b8204fed906fab58104ee541f9/jiter-0.11.1-cp314-cp314t-win_amd64.whl", hash = "sha256:f4db07d127b54c4a2d43b4cf05ff0193e4f73e0dd90c74037e16df0b29f666e1", size = 204046, upload-time = "2025-10-17T11:30:41.692Z" }, + { url = "https://files.pythonhosted.org/packages/dd/01/43f7b4eb61db3e565574c4c5714685d042fb652f9eef7e5a3de6aafa943a/jiter-0.11.1-cp314-cp314t-win_arm64.whl", hash = "sha256:28e4fdf2d7ebfc935523e50d1efa3970043cfaa161674fe66f9642409d001dfe", size = 188069, upload-time = "2025-10-17T11:30:43.23Z" }, + { url = "https://files.pythonhosted.org/packages/9d/51/bd41562dd284e2a18b6dc0a99d195fd4a3560d52ab192c42e56fe0316643/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:e642b5270e61dd02265866398707f90e365b5db2eb65a4f30c789d826682e1f6", size = 306871, upload-time = "2025-10-17T11:31:03.616Z" }, + { url = "https://files.pythonhosted.org/packages/ba/cb/64e7f21dd357e8cd6b3c919c26fac7fc198385bbd1d85bb3b5355600d787/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:464ba6d000585e4e2fd1e891f31f1231f497273414f5019e27c00a4b8f7a24ad", size = 301454, upload-time = "2025-10-17T11:31:05.338Z" }, + { url = "https://files.pythonhosted.org/packages/55/b0/54bdc00da4ef39801b1419a01035bd8857983de984fd3776b0be6b94add7/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:055568693ab35e0bf3a171b03bb40b2dcb10352359e0ab9b5ed0da2bf1eb6f6f", size = 336801, upload-time = "2025-10-17T11:31:06.893Z" }, + { url = "https://files.pythonhosted.org/packages/de/8f/87176ed071d42e9db415ed8be787ef4ef31a4fa27f52e6a4fbf34387bd28/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c69ea798d08a915ba4478113efa9e694971e410056392f4526d796f136d3fa", size = 343452, upload-time = "2025-10-17T11:31:08.259Z" }, + { url = "https://files.pythonhosted.org/packages/a6/bc/950dd7f170c6394b6fdd73f989d9e729bd98907bcc4430ef080a72d06b77/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:0d4d6993edc83cf75e8c6828a8d6ce40a09ee87e38c7bfba6924f39e1337e21d", size = 302626, upload-time = "2025-10-17T11:31:09.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/65/43d7971ca82ee100b7b9b520573eeef7eabc0a45d490168ebb9a9b5bb8b2/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f78d151c83a87a6cf5461d5ee55bc730dd9ae227377ac6f115b922989b95f838", size = 297034, upload-time = "2025-10-17T11:31:10.975Z" }, + { url = "https://files.pythonhosted.org/packages/19/4c/000e1e0c0c67e96557a279f8969487ea2732d6c7311698819f977abae837/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9022974781155cd5521d5cb10997a03ee5e31e8454c9d999dcdccd253f2353f", size = 337328, upload-time = "2025-10-17T11:31:12.399Z" }, + { url = "https://files.pythonhosted.org/packages/d9/71/71408b02c6133153336d29fa3ba53000f1e1a3f78bb2fc2d1a1865d2e743/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18c77aaa9117510d5bdc6a946baf21b1f0cfa58ef04d31c8d016f206f2118960", size = 343697, upload-time = "2025-10-17T11:31:13.773Z" }, ] [[package]] @@ -417,9 +475,71 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, ] +[[package]] +name = "langchain" +version = "1.0.3" +source = { editable = "../../langchain_v1" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph" }, + { name = "pydantic" }, +] + +[package.metadata] +requires-dist = [ + { name = "langchain-anthropic", marker = "extra == 'anthropic'" }, + { name = "langchain-aws", marker = "extra == 'aws'" }, + { name = "langchain-community", marker = "extra == 'community'" }, + { name = "langchain-core", editable = "../../core" }, + { name = "langchain-deepseek", marker = "extra == 'deepseek'" }, + { name = "langchain-fireworks", marker = "extra == 'fireworks'" }, + { name = "langchain-google-genai", marker = "extra == 'google-genai'" }, + { name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" }, + { name = "langchain-groq", marker = "extra == 'groq'" }, + { name = "langchain-huggingface", marker = "extra == 'huggingface'" }, + { name = "langchain-mistralai", marker = "extra == 'mistralai'" }, + { name = "langchain-ollama", marker = "extra == 'ollama'" }, + { name = "langchain-openai", marker = "extra == 'openai'", editable = "../openai" }, + { name = "langchain-perplexity", marker = "extra == 'perplexity'" }, + { name = "langchain-together", marker = "extra == 'together'" }, + { name = "langchain-xai", marker = "extra == 'xai'" }, + { name = "langgraph", specifier = ">=1.0.2,<1.1.0" }, + { name = "pydantic", specifier = ">=2.7.4,<3.0.0" }, +] +provides-extras = ["community", "anthropic", "openai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"] + +[package.metadata.requires-dev] +lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13.0" }] +test = [ + { name = "langchain-openai", editable = "../openai" }, + { name = "langchain-tests", editable = "../../standard-tests" }, + { name = "pytest", specifier = ">=8.0.0,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.2,<2.0.0" }, + { name = "pytest-cov", specifier = ">=4.0.0,<8.0.0" }, + { name = "pytest-mock" }, + { name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" }, + { name = "pytest-watcher", specifier = 
">=0.2.6,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" }, + { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, + { name = "toml", specifier = ">=0.10.2,<1.0.0" }, +] +test-integration = [ + { name = "cassio", specifier = ">=0.1.0,<1.0.0" }, + { name = "langchain-core", editable = "../../core" }, + { name = "langchain-text-splitters", editable = "../../text-splitters" }, + { name = "langchainhub", specifier = ">=0.1.16,<1.0.0" }, + { name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" }, + { name = "vcrpy", specifier = ">=7.0.0,<8.0.0" }, + { name = "wrapt", specifier = ">=1.15.0,<2.0.0" }, +] +typing = [ + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, + { name = "types-toml", specifier = ">=0.10.8.20240310,<1.0.0.0" }, +] + [[package]] name = "langchain-anthropic" -version = "1.0.0a3" +version = "1.0.1" source = { editable = "." } dependencies = [ { name = "anthropic" }, @@ -437,8 +557,10 @@ lint = [ test = [ { name = "defusedxml" }, { name = "freezegun" }, + { name = "langchain" }, { name = "langchain-core" }, { name = "langchain-tests" }, + { name = "langgraph-prebuilt" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-mock" }, @@ -446,6 +568,7 @@ test = [ { name = "pytest-socket" }, { name = "pytest-timeout" }, { name = "pytest-watcher" }, + { name = "pytest-xdist" }, { name = "syrupy" }, { name = "vcrpy" }, ] @@ -472,8 +595,10 @@ lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }] test = [ { name = "defusedxml", specifier = ">=0.7.1,<1.0.0" }, { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, + { name = "langchain", editable = "../../langchain_v1" }, { name = "langchain-core", editable = "../../core" }, { name = "langchain-tests", editable = "../../standard-tests" }, + { name = "langgraph-prebuilt", specifier = ">=0.7.0a2" }, { name = "pytest", specifier = ">=7.3.0,<8.0.0" }, { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" }, { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" }, @@ -481,6 +606,7 @@ test = [ { name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" }, { name = "pytest-timeout", specifier = ">=2.3.1,<3.0.0" }, { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.8.0,<4.0.0" }, { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, { name = "vcrpy", specifier = ">=7.0.0,<8.0.0" }, ] @@ -496,7 +622,7 @@ typing = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.3" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -530,6 +656,7 @@ test = [ { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" }, { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, { name = "grandalf", specifier = ">=0.8.0,<1.0.0" }, + { name = "langchain-model-profiles", directory = "../../model-profiles" }, { name = "langchain-tests", directory = "../../standard-tests" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, @@ -546,6 +673,7 @@ test = [ ] test-integration = [] typing = [ + { name = "langchain-model-profiles", directory = "../../model-profiles" }, { name = "langchain-text-splitters", directory = "../../text-splitters" }, { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, @@ -554,13 +682,13 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name 
= "httpx" }, { name = "langchain-core" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-benchmark" }, @@ -597,9 +725,65 @@ typing = [ { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, ] +[[package]] +name = "langgraph" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, + { name = "langgraph-prebuilt" }, + { name = "langgraph-sdk" }, + { name = "pydantic" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/25/18e6e056ee1a8af64fcab441b4a3f2e158399935b08f148c7718fc42ecdb/langgraph-1.0.2.tar.gz", hash = "sha256:dae1af08d6025cb1fcaed68f502c01af7d634d9044787c853a46c791cfc52f67", size = 482660, upload-time = "2025-10-29T18:38:28.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/b1/9f4912e13d4ed691f2685c8a4b764b5a9237a30cca0c5782bc213d9f0a9a/langgraph-1.0.2-py3-none-any.whl", hash = "sha256:b3d56b8c01de857b5fb1da107e8eab6e30512a377685eeedb4f76456724c9729", size = 156751, upload-time = "2025-10-29T18:38:26.577Z" }, +] + +[[package]] +name = "langgraph-checkpoint" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "ormsgpack" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/cb/2a6dad2f0a14317580cc122e2a60e7f0ecabb50aaa6dc5b7a6a2c94cead7/langgraph_checkpoint-3.0.0.tar.gz", hash = "sha256:f738695ad938878d8f4775d907d9629e9fcd345b1950196effb08f088c52369e", size = 132132, upload-time = "2025-10-20T18:35:49.132Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/2a/2efe0b5a72c41e3a936c81c5f5d8693987a1b260287ff1bbebaae1b7b888/langgraph_checkpoint-3.0.0-py3-none-any.whl", hash = "sha256:560beb83e629784ab689212a3d60834fb3196b4bbe1d6ac18e5cad5d85d46010", size = 46060, upload-time = "2025-10-20T18:35:48.255Z" }, +] + +[[package]] +name = "langgraph-prebuilt" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/2f/b940590436e07b3450fe6d791aad5e581363ad536c4f1771e3ba46530268/langgraph_prebuilt-1.0.2.tar.gz", hash = "sha256:9896dbabf04f086eb59df4294f54ab5bdb21cd78e27e0a10e695dffd1cc6097d", size = 142075, upload-time = "2025-10-29T18:29:00.401Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/2f/9a7d00d4afa036e65294059c7c912002fb72ba5dbbd5c2a871ca06360278/langgraph_prebuilt-1.0.2-py3-none-any.whl", hash = "sha256:d9499f7c449fb637ee7b87e3f6a3b74095f4202053c74d33894bd839ea4c57c7", size = 34286, upload-time = "2025-10-29T18:28:59.26Z" }, +] + +[[package]] +name = "langgraph-sdk" +version = "0.2.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/d8/40e01190a73c564a4744e29a6c902f78d34d43dad9b652a363a92a67059c/langgraph_sdk-0.2.9.tar.gz", hash = 
"sha256:b3bd04c6be4fa382996cd2be8fbc1e7cc94857d2bc6b6f4599a7f2a245975303", size = 99802, upload-time = "2025-09-20T18:49:14.734Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/05/b2d34e16638241e6f27a6946d28160d4b8b641383787646d41a3727e0896/langgraph_sdk-0.2.9-py3-none-any.whl", hash = "sha256:fbf302edadbf0fb343596f91c597794e936ef68eebc0d3e1d358b6f9f72a1429", size = 56752, upload-time = "2025-09-20T18:49:13.346Z" }, +] + [[package]] name = "langsmith" -version = "0.4.31" +version = "0.4.37" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -610,9 +794,9 @@ dependencies = [ { name = "requests-toolbelt" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/f5/edbdf89a162ee025348b3b2080fb3b88f4a1040a5a186f32d34aca913994/langsmith-0.4.31.tar.gz", hash = "sha256:5fb3729e22bd9a225391936cb9d1080322e6c375bb776514af06b56d6c46ed3e", size = 959698, upload-time = "2025-09-25T04:18:19.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/51/58d561dd40ec564509724f0a6a7148aa8090143208ef5d06b73b7fc90d31/langsmith-0.4.37.tar.gz", hash = "sha256:d9a0eb6dd93f89843ac982c9f92be93cf2bcabbe19957f362c547766c7366c71", size = 959089, upload-time = "2025-10-15T22:33:59.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/8e/e7a43d907a147e1f87eebdd6737483f9feba52a5d4b20f69d0bd6f2fa22f/langsmith-0.4.31-py3-none-any.whl", hash = "sha256:64f340bdead21defe5f4a6ca330c11073e35444989169f669508edf45a19025f", size = 386347, upload-time = "2025-09-25T04:18:16.69Z" }, + { url = "https://files.pythonhosted.org/packages/14/e8/edff4de49cf364eb9ee88d13da0a555844df32438413bf53d90d507b97cd/langsmith-0.4.37-py3-none-any.whl", hash = "sha256:e34a94ce7277646299e4703a0f6e2d2c43647a28e8b800bb7ef82fd87a0ec766", size = 396111, upload-time = "2025-10-15T22:33:57.392Z" }, ] [[package]] @@ -638,104 +822,140 @@ wheels = [ [[package]] name = "multidict" -version = "6.6.4" +version = "6.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/6b/86f353088c1358e76fd30b0146947fddecee812703b604ee901e85cd2a80/multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f", size = 77054, upload-time = "2025-08-11T12:06:02.99Z" }, - { url = "https://files.pythonhosted.org/packages/19/5d/c01dc3d3788bb877bd7f5753ea6eb23c1beeca8044902a8f5bfb54430f63/multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb", size = 44914, upload-time = "2025-08-11T12:06:05.264Z" }, - { url = "https://files.pythonhosted.org/packages/46/44/964dae19ea42f7d3e166474d8205f14bb811020e28bc423d46123ddda763/multidict-6.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495", size = 44601, upload-time = "2025-08-11T12:06:06.627Z" }, - { url = "https://files.pythonhosted.org/packages/31/20/0616348a1dfb36cb2ab33fc9521de1f27235a397bf3f59338e583afadd17/multidict-6.6.4-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8", size = 224821, upload-time = "2025-08-11T12:06:08.06Z" }, - { url = "https://files.pythonhosted.org/packages/14/26/5d8923c69c110ff51861af05bd27ca6783011b96725d59ccae6d9daeb627/multidict-6.6.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7", size = 242608, upload-time = "2025-08-11T12:06:09.697Z" }, - { url = "https://files.pythonhosted.org/packages/5c/cc/e2ad3ba9459aa34fa65cf1f82a5c4a820a2ce615aacfb5143b8817f76504/multidict-6.6.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796", size = 222324, upload-time = "2025-08-11T12:06:10.905Z" }, - { url = "https://files.pythonhosted.org/packages/19/db/4ed0f65701afbc2cb0c140d2d02928bb0fe38dd044af76e58ad7c54fd21f/multidict-6.6.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db", size = 253234, upload-time = "2025-08-11T12:06:12.658Z" }, - { url = "https://files.pythonhosted.org/packages/94/c1/5160c9813269e39ae14b73debb907bfaaa1beee1762da8c4fb95df4764ed/multidict-6.6.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0", size = 251613, upload-time = "2025-08-11T12:06:13.97Z" }, - { url = "https://files.pythonhosted.org/packages/05/a9/48d1bd111fc2f8fb98b2ed7f9a115c55a9355358432a19f53c0b74d8425d/multidict-6.6.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877", size = 241649, upload-time = "2025-08-11T12:06:15.204Z" }, - { url = "https://files.pythonhosted.org/packages/85/2a/f7d743df0019408768af8a70d2037546a2be7b81fbb65f040d76caafd4c5/multidict-6.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace", size = 239238, upload-time = "2025-08-11T12:06:16.467Z" }, - { url = "https://files.pythonhosted.org/packages/cb/b8/4f4bb13323c2d647323f7919201493cf48ebe7ded971717bfb0f1a79b6bf/multidict-6.6.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6", size = 233517, upload-time = "2025-08-11T12:06:18.107Z" }, - { url = "https://files.pythonhosted.org/packages/33/29/4293c26029ebfbba4f574febd2ed01b6f619cfa0d2e344217d53eef34192/multidict-6.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb", size = 243122, upload-time = "2025-08-11T12:06:19.361Z" }, - { url = "https://files.pythonhosted.org/packages/20/60/a1c53628168aa22447bfde3a8730096ac28086704a0d8c590f3b63388d0c/multidict-6.6.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb", size = 248992, upload-time = "2025-08-11T12:06:20.661Z" }, - { url 
= "https://files.pythonhosted.org/packages/a3/3b/55443a0c372f33cae5d9ec37a6a973802884fa0ab3586659b197cf8cc5e9/multidict-6.6.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987", size = 243708, upload-time = "2025-08-11T12:06:21.891Z" }, - { url = "https://files.pythonhosted.org/packages/7c/60/a18c6900086769312560b2626b18e8cca22d9e85b1186ba77f4755b11266/multidict-6.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f", size = 237498, upload-time = "2025-08-11T12:06:23.206Z" }, - { url = "https://files.pythonhosted.org/packages/11/3d/8bdd8bcaff2951ce2affccca107a404925a2beafedd5aef0b5e4a71120a6/multidict-6.6.4-cp310-cp310-win32.whl", hash = "sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f", size = 41415, upload-time = "2025-08-11T12:06:24.77Z" }, - { url = "https://files.pythonhosted.org/packages/c0/53/cab1ad80356a4cd1b685a254b680167059b433b573e53872fab245e9fc95/multidict-6.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0", size = 46046, upload-time = "2025-08-11T12:06:25.893Z" }, - { url = "https://files.pythonhosted.org/packages/cf/9a/874212b6f5c1c2d870d0a7adc5bb4cfe9b0624fa15cdf5cf757c0f5087ae/multidict-6.6.4-cp310-cp310-win_arm64.whl", hash = "sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729", size = 43147, upload-time = "2025-08-11T12:06:27.534Z" }, - { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, - { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, - { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, - { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, - { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, - { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, - { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, - { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, - { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, - { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, - { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, - { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, - { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, - { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, - { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, - { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, - { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, - { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, - { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, - { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, - { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, - { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, - { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, - { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, - { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, - { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" }, - { url = "https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" }, - { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" }, - { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" }, - { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" }, - { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" }, - { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" }, - { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" }, - { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" }, - { url = "https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" }, - { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" }, - { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" }, - { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708, upload-time = "2025-08-11T12:07:43.405Z" }, - { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" }, - { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" }, - { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" }, - { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" }, - { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" }, - { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" }, - { url = "https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" }, - { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" }, - { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" }, - { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" }, - { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" }, - { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" }, - { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" }, - { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" }, - { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" }, - { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", 
size = 47775, upload-time = "2025-08-11T12:08:12.439Z" }, - { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" }, - { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/a9/63/7bdd4adc330abcca54c85728db2327130e49e52e8c3ce685cec44e0f2e9f/multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349", size = 77153, upload-time = "2025-10-06T14:48:26.409Z" }, + { url = "https://files.pythonhosted.org/packages/3f/bb/b6c35ff175ed1a3142222b78455ee31be71a8396ed3ab5280fbe3ebe4e85/multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e", size = 44993, upload-time = "2025-10-06T14:48:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/e0/1f/064c77877c5fa6df6d346e68075c0f6998547afe952d6471b4c5f6a7345d/multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3", size = 44607, upload-time = "2025-10-06T14:48:29.581Z" }, + { url = "https://files.pythonhosted.org/packages/04/7a/bf6aa92065dd47f287690000b3d7d332edfccb2277634cadf6a810463c6a/multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046", size = 241847, upload-time = "2025-10-06T14:48:32.107Z" }, + { url = "https://files.pythonhosted.org/packages/94/39/297a8de920f76eda343e4ce05f3b489f0ab3f9504f2576dfb37b7c08ca08/multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32", size = 242616, upload-time = "2025-10-06T14:48:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/39/3a/d0eee2898cfd9d654aea6cb8c4addc2f9756e9a7e09391cfe55541f917f7/multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73", size = 222333, upload-time = "2025-10-06T14:48:35.9Z" }, + { url = "https://files.pythonhosted.org/packages/05/48/3b328851193c7a4240815b71eea165b49248867bbb6153a0aee227a0bb47/multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc", size = 253239, upload-time = "2025-10-06T14:48:37.302Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ca/0706a98c8d126a89245413225ca4a3fefc8435014de309cf8b30acb68841/multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62", size = 251618, upload-time = "2025-10-06T14:48:38.963Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/9c7992f245554d8b173f6f0a048ad24b3e645d883f096857ec2c0822b8bd/multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84", size = 241655, upload-time = "2025-10-06T14:48:40.312Z" }, + { url = "https://files.pythonhosted.org/packages/31/79/26a85991ae67efd1c0b1fc2e0c275b8a6aceeb155a68861f63f87a798f16/multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0", size = 239245, upload-time = "2025-10-06T14:48:41.848Z" }, + { url = "https://files.pythonhosted.org/packages/14/1e/75fa96394478930b79d0302eaf9a6c69f34005a1a5251ac8b9c336486ec9/multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e", size = 233523, upload-time = "2025-10-06T14:48:43.749Z" }, + { url = "https://files.pythonhosted.org/packages/b2/5e/085544cb9f9c4ad2b5d97467c15f856df8d9bac410cffd5c43991a5d878b/multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4", size = 243129, upload-time = "2025-10-06T14:48:45.225Z" }, + { url = "https://files.pythonhosted.org/packages/b9/c3/e9d9e2f20c9474e7a8fcef28f863c5cbd29bb5adce6b70cebe8bdad0039d/multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648", size = 248999, upload-time = "2025-10-06T14:48:46.703Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3f/df171b6efa3239ae33b97b887e42671cd1d94d460614bfb2c30ffdab3b95/multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111", size = 243711, upload-time = "2025-10-06T14:48:48.146Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2f/9b5564888c4e14b9af64c54acf149263721a283aaf4aa0ae89b091d5d8c1/multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36", size = 237504, upload-time = "2025-10-06T14:48:49.447Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3a/0bd6ca0f7d96d790542d591c8c3354c1e1b6bfd2024d4d92dc3d87485ec7/multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85", size = 41422, upload-time = "2025-10-06T14:48:50.789Z" }, + { url = "https://files.pythonhosted.org/packages/00/35/f6a637ea2c75f0d3b7c7d41b1189189acff0d9deeb8b8f35536bb30f5e33/multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7", size = 46050, upload-time = "2025-10-06T14:48:51.938Z" }, + { url = "https://files.pythonhosted.org/packages/e7/b8/f7bf8329b39893d02d9d95cf610c75885d12fc0f402b1c894e1c8e01c916/multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0", size = 43153, upload-time = "2025-10-06T14:48:53.146Z" }, + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, 
upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, + { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, + { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, + { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, + { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, + { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, + { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, + { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, + { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, + { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, + { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, + { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, + { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, + { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" }, + { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, + { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, + { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, + { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" }, + { url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" }, + { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" }, + { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" }, + { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" }, + { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520, upload-time = "2025-10-06T14:51:08.091Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = "2025-10-06T14:51:10.365Z" }, + { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, + { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, + { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, + { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, + { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, + { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, + { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, + { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" }, + { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" }, + { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" }, + { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" }, + { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376, upload-time = "2025-10-06T14:51:43.55Z" }, + { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = "2025-10-06T14:51:45.265Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, + { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, + { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" 
}, + { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] @@ -860,7 +1080,7 @@ wheels = [ [[package]] name = "numpy" -version = "2.3.3" +version = "2.3.4" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -868,81 +1088,81 @@ resolution-markers = [ "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/f4/098d2270d52b41f1bd7db9fc288aaa0400cb48c2a3e2af6fa365d9720947/numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a", size = 20582187, upload-time = "2025-10-15T16:18:11.77Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/45/e80d203ef6b267aa29b22714fb558930b27960a0c5ce3c19c999232bb3eb/numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d", size = 21259253, upload-time = "2025-09-09T15:56:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/52/18/cf2c648fccf339e59302e00e5f2bc87725a3ce1992f30f3f78c9044d7c43/numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569", size = 14450980, upload-time = "2025-09-09T15:56:05.926Z" }, - { url = "https://files.pythonhosted.org/packages/93/fb/9af1082bec870188c42a1c239839915b74a5099c392389ff04215dcee812/numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f", size = 5379709, upload-time = "2025-09-09T15:56:07.95Z" }, - { url = "https://files.pythonhosted.org/packages/75/0f/bfd7abca52bcbf9a4a65abc83fe18ef01ccdeb37bfb28bbd6ad613447c79/numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125", size = 6913923, upload-time = "2025-09-09T15:56:09.443Z" }, - { url = "https://files.pythonhosted.org/packages/79/55/d69adad255e87ab7afda1caf93ca997859092afeb697703e2f010f7c2e55/numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48", size = 14589591, upload-time = "2025-09-09T15:56:11.234Z" }, - { url = "https://files.pythonhosted.org/packages/10/a2/010b0e27ddeacab7839957d7a8f00e91206e0c2c47abbb5f35a2630e5387/numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6", size = 16938714, upload-time = "2025-09-09T15:56:14.637Z" }, - { url = "https://files.pythonhosted.org/packages/1c/6b/12ce8ede632c7126eb2762b9e15e18e204b81725b81f35176eac14dc5b82/numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa", size = 16370592, upload-time = "2025-09-09T15:56:17.285Z" }, - { url = "https://files.pythonhosted.org/packages/b4/35/aba8568b2593067bb6a8fe4c52babb23b4c3b9c80e1b49dff03a09925e4a/numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30", size = 18884474, upload-time = "2025-09-09T15:56:20.943Z" }, - { url = "https://files.pythonhosted.org/packages/45/fa/7f43ba10c77575e8be7b0138d107e4f44ca4a1ef322cd16980ea3e8b8222/numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57", size = 6599794, upload-time = "2025-09-09T15:56:23.258Z" }, - { url = "https://files.pythonhosted.org/packages/0a/a2/a4f78cb2241fe5664a22a10332f2be886dcdea8784c9f6a01c272da9b426/numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa", size = 13088104, upload-time = "2025-09-09T15:56:25.476Z" }, - { url = "https://files.pythonhosted.org/packages/79/64/e424e975adbd38282ebcd4891661965b78783de893b381cbc4832fb9beb2/numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7", size = 10460772, upload-time = "2025-09-09T15:56:27.679Z" }, - { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" }, - { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" }, - { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" }, - { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" }, - { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = 
"2025-09-09T15:56:43.343Z" }, - { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" }, - { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" }, - { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" }, - { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" }, - { url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" }, - { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" }, - { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" }, - { url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" }, - { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" }, - { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" }, - { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" }, - { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" }, - { url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" }, - { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" }, - { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" }, - { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" }, - { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" }, - { url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" }, - { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" }, - { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" }, - { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" }, - { url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" }, - { url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593", size = 20951527, upload-time = "2025-09-09T15:57:52.006Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652", size = 14186159, upload-time = "2025-09-09T15:57:54.407Z" }, - { url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7", size = 5114624, upload-time = "2025-09-09T15:57:56.5Z" }, - { url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a", size = 6642627, upload-time = "2025-09-09T15:57:58.206Z" }, - { url = "https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe", size = 14296926, upload-time = "2025-09-09T15:58:00.035Z" }, - { url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421", size = 16638958, upload-time = "2025-09-09T15:58:02.738Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021", size = 16071920, upload-time = "2025-09-09T15:58:05.029Z" }, - { url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf", size = 18577076, upload-time = "2025-09-09T15:58:07.745Z" }, - { url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0", size = 6366952, upload-time = "2025-09-09T15:58:10.096Z" }, - { url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8", size = 12919322, upload-time = "2025-09-09T15:58:12.138Z" }, - { url = "https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe", size = 10478630, upload-time = "2025-09-09T15:58:14.64Z" }, - { url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00", size = 21047987, upload-time = "2025-09-09T15:58:16.889Z" }, - { url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a", size = 14301076, upload-time = "2025-09-09T15:58:20.343Z" }, - { url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d", size = 5229491, upload-time = "2025-09-09T15:58:22.481Z" }, - { url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a", size = 6737913, upload-time = "2025-09-09T15:58:24.569Z" }, - { url = "https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54", size = 14352811, upload-time = "2025-09-09T15:58:26.416Z" }, - { url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e", size = 16702689, upload-time = "2025-09-09T15:58:28.831Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097", size = 16133855, upload-time = "2025-09-09T15:58:31.349Z" }, - { url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970", size = 18652520, upload-time = "2025-09-09T15:58:33.762Z" }, - { url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5", size = 6515371, upload-time = "2025-09-09T15:58:36.04Z" }, - { url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f", size = 13112576, upload-time = "2025-09-09T15:58:37.927Z" }, - { url = "https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b", size = 10545953, upload-time = "2025-09-09T15:58:40.576Z" }, - { url = "https://files.pythonhosted.org/packages/b8/f2/7e0a37cfced2644c9563c529f29fa28acbd0960dde32ece683aafa6f4949/numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e", size = 21131019, upload-time = "2025-09-09T15:58:42.838Z" }, - { url = "https://files.pythonhosted.org/packages/1a/7e/3291f505297ed63831135a6cc0f474da0c868a1f31b0dd9a9f03a7a0d2ed/numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150", size = 14376288, upload-time = "2025-09-09T15:58:45.425Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4b/ae02e985bdeee73d7b5abdefeb98aef1207e96d4c0621ee0cf228ddfac3c/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3", size = 5305425, upload-time = "2025-09-09T15:58:48.6Z" }, - { url = "https://files.pythonhosted.org/packages/8b/eb/9df215d6d7250db32007941500dc51c48190be25f2401d5b2b564e467247/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0", size = 6819053, upload-time = "2025-09-09T15:58:50.401Z" }, - { url = "https://files.pythonhosted.org/packages/57/62/208293d7d6b2a8998a4a1f23ac758648c3c32182d4ce4346062018362e29/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e", size = 14420354, upload-time = "2025-09-09T15:58:52.704Z" }, - { url = "https://files.pythonhosted.org/packages/ed/0c/8e86e0ff7072e14a71b4c6af63175e40d1e7e933ce9b9e9f765a95b4e0c3/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db", size = 16760413, upload-time = "2025-09-09T15:58:55.027Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/11/0cc63f9f321ccf63886ac203336777140011fb669e739da36d8db3c53b98/numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc", size = 12971844, upload-time = "2025-09-09T15:58:57.359Z" }, + { url = "https://files.pythonhosted.org/packages/60/e7/0e07379944aa8afb49a556a2b54587b828eb41dc9adc56fb7615b678ca53/numpy-2.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e78aecd2800b32e8347ce49316d3eaf04aed849cd5b38e0af39f829a4e59f5eb", size = 21259519, upload-time = "2025-10-15T16:15:19.012Z" }, + { url = "https://files.pythonhosted.org/packages/d0/cb/5a69293561e8819b09e34ed9e873b9a82b5f2ade23dce4c51dc507f6cfe1/numpy-2.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd09cc5d65bda1e79432859c40978010622112e9194e581e3415a3eccc7f43f", size = 14452796, upload-time = "2025-10-15T16:15:23.094Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/ff11611200acd602a1e5129e36cfd25bf01ad8e5cf927baf2e90236eb02e/numpy-2.3.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1b219560ae2c1de48ead517d085bc2d05b9433f8e49d0955c82e8cd37bd7bf36", size = 5381639, upload-time = "2025-10-15T16:15:25.572Z" }, + { url = "https://files.pythonhosted.org/packages/ea/77/e95c757a6fe7a48d28a009267408e8aa382630cc1ad1db7451b3bc21dbb4/numpy-2.3.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:bafa7d87d4c99752d07815ed7a2c0964f8ab311eb8168f41b910bd01d15b6032", size = 6914296, upload-time = "2025-10-15T16:15:27.079Z" }, + { url = "https://files.pythonhosted.org/packages/a3/d2/137c7b6841c942124eae921279e5c41b1c34bab0e6fc60c7348e69afd165/numpy-2.3.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36dc13af226aeab72b7abad501d370d606326a0029b9f435eacb3b8c94b8a8b7", size = 14591904, upload-time = "2025-10-15T16:15:29.044Z" }, + { url = "https://files.pythonhosted.org/packages/bb/32/67e3b0f07b0aba57a078c4ab777a9e8e6bc62f24fb53a2337f75f9691699/numpy-2.3.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7b2f9a18b5ff9824a6af80de4f37f4ec3c2aab05ef08f51c77a093f5b89adda", size = 16939602, upload-time = "2025-10-15T16:15:31.106Z" }, + { url = "https://files.pythonhosted.org/packages/95/22/9639c30e32c93c4cee3ccdb4b09c2d0fbff4dcd06d36b357da06146530fb/numpy-2.3.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9984bd645a8db6ca15d850ff996856d8762c51a2239225288f08f9050ca240a0", size = 16372661, upload-time = "2025-10-15T16:15:33.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/e9/a685079529be2b0156ae0c11b13d6be647743095bb51d46589e95be88086/numpy-2.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:64c5825affc76942973a70acf438a8ab618dbd692b84cd5ec40a0a0509edc09a", size = 18884682, upload-time = "2025-10-15T16:15:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/cf/85/f6f00d019b0cc741e64b4e00ce865a57b6bed945d1bbeb1ccadbc647959b/numpy-2.3.4-cp311-cp311-win32.whl", hash = "sha256:ed759bf7a70342f7817d88376eb7142fab9fef8320d6019ef87fae05a99874e1", size = 6570076, upload-time = "2025-10-15T16:15:38.225Z" }, + { url = "https://files.pythonhosted.org/packages/7d/10/f8850982021cb90e2ec31990291f9e830ce7d94eef432b15066e7cbe0bec/numpy-2.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:faba246fb30ea2a526c2e9645f61612341de1a83fb1e0c5edf4ddda5a9c10996", size = 13089358, upload-time = "2025-10-15T16:15:40.404Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/ad/afdd8351385edf0b3445f9e24210a9c3971ef4de8fd85155462fc4321d79/numpy-2.3.4-cp311-cp311-win_arm64.whl", hash = "sha256:4c01835e718bcebe80394fd0ac66c07cbb90147ebbdad3dcecd3f25de2ae7e2c", size = 10462292, upload-time = "2025-10-15T16:15:42.896Z" }, + { url = "https://files.pythonhosted.org/packages/96/7a/02420400b736f84317e759291b8edaeee9dc921f72b045475a9cbdb26b17/numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11", size = 20957727, upload-time = "2025-10-15T16:15:44.9Z" }, + { url = "https://files.pythonhosted.org/packages/18/90/a014805d627aa5750f6f0e878172afb6454552da929144b3c07fcae1bb13/numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9", size = 14187262, upload-time = "2025-10-15T16:15:47.761Z" }, + { url = "https://files.pythonhosted.org/packages/c7/e4/0a94b09abe89e500dc748e7515f21a13e30c5c3fe3396e6d4ac108c25fca/numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667", size = 5115992, upload-time = "2025-10-15T16:15:50.144Z" }, + { url = "https://files.pythonhosted.org/packages/88/dd/db77c75b055c6157cbd4f9c92c4458daef0dd9cbe6d8d2fe7f803cb64c37/numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef", size = 6648672, upload-time = "2025-10-15T16:15:52.442Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e6/e31b0d713719610e406c0ea3ae0d90760465b086da8783e2fd835ad59027/numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e", size = 14284156, upload-time = "2025-10-15T16:15:54.351Z" }, + { url = "https://files.pythonhosted.org/packages/f9/58/30a85127bfee6f108282107caf8e06a1f0cc997cb6b52cdee699276fcce4/numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a", size = 16641271, upload-time = "2025-10-15T16:15:56.67Z" }, + { url = "https://files.pythonhosted.org/packages/06/f2/2e06a0f2adf23e3ae29283ad96959267938d0efd20a2e25353b70065bfec/numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16", size = 16059531, upload-time = "2025-10-15T16:15:59.412Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e7/b106253c7c0d5dc352b9c8fab91afd76a93950998167fa3e5afe4ef3a18f/numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786", size = 18578983, upload-time = "2025-10-15T16:16:01.804Z" }, + { url = "https://files.pythonhosted.org/packages/73/e3/04ecc41e71462276ee867ccbef26a4448638eadecf1bc56772c9ed6d0255/numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc", size = 6291380, upload-time = "2025-10-15T16:16:03.938Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a8/566578b10d8d0e9955b1b6cd5db4e9d4592dd0026a941ff7994cedda030a/numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32", size = 12787999, upload-time = "2025-10-15T16:16:05.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/22/9c903a957d0a8071b607f5b1bff0761d6e608b9a965945411f867d515db1/numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db", size = 10197412, upload-time = "2025-10-15T16:16:07.854Z" }, + { url = "https://files.pythonhosted.org/packages/57/7e/b72610cc91edf138bc588df5150957a4937221ca6058b825b4725c27be62/numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966", size = 20950335, upload-time = "2025-10-15T16:16:10.304Z" }, + { url = "https://files.pythonhosted.org/packages/3e/46/bdd3370dcea2f95ef14af79dbf81e6927102ddf1cc54adc0024d61252fd9/numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3", size = 14179878, upload-time = "2025-10-15T16:16:12.595Z" }, + { url = "https://files.pythonhosted.org/packages/ac/01/5a67cb785bda60f45415d09c2bc245433f1c68dd82eef9c9002c508b5a65/numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197", size = 5108673, upload-time = "2025-10-15T16:16:14.877Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cd/8428e23a9fcebd33988f4cb61208fda832800ca03781f471f3727a820704/numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e", size = 6641438, upload-time = "2025-10-15T16:16:16.805Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d1/913fe563820f3c6b079f992458f7331278dcd7ba8427e8e745af37ddb44f/numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7", size = 14281290, upload-time = "2025-10-15T16:16:18.764Z" }, + { url = "https://files.pythonhosted.org/packages/9e/7e/7d306ff7cb143e6d975cfa7eb98a93e73495c4deabb7d1b5ecf09ea0fd69/numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953", size = 16636543, upload-time = "2025-10-15T16:16:21.072Z" }, + { url = "https://files.pythonhosted.org/packages/47/6a/8cfc486237e56ccfb0db234945552a557ca266f022d281a2f577b98e955c/numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37", size = 16056117, upload-time = "2025-10-15T16:16:23.369Z" }, + { url = "https://files.pythonhosted.org/packages/b1/0e/42cb5e69ea901e06ce24bfcc4b5664a56f950a70efdcf221f30d9615f3f3/numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd", size = 18577788, upload-time = "2025-10-15T16:16:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/86/92/41c3d5157d3177559ef0a35da50f0cda7fa071f4ba2306dd36818591a5bc/numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646", size = 6282620, upload-time = "2025-10-15T16:16:29.811Z" }, + { url = "https://files.pythonhosted.org/packages/09/97/fd421e8bc50766665ad35536c2bb4ef916533ba1fdd053a62d96cc7c8b95/numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d", size = 12784672, upload-time = "2025-10-15T16:16:31.589Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/df/5474fb2f74970ca8eb978093969b125a84cc3d30e47f82191f981f13a8a0/numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc", size = 10196702, upload-time = "2025-10-15T16:16:33.902Z" }, + { url = "https://files.pythonhosted.org/packages/11/83/66ac031464ec1767ea3ed48ce40f615eb441072945e98693bec0bcd056cc/numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879", size = 21049003, upload-time = "2025-10-15T16:16:36.101Z" }, + { url = "https://files.pythonhosted.org/packages/5f/99/5b14e0e686e61371659a1d5bebd04596b1d72227ce36eed121bb0aeab798/numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562", size = 14302980, upload-time = "2025-10-15T16:16:39.124Z" }, + { url = "https://files.pythonhosted.org/packages/2c/44/e9486649cd087d9fc6920e3fc3ac2aba10838d10804b1e179fb7cbc4e634/numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a", size = 5231472, upload-time = "2025-10-15T16:16:41.168Z" }, + { url = "https://files.pythonhosted.org/packages/3e/51/902b24fa8887e5fe2063fd61b1895a476d0bbf46811ab0c7fdf4bd127345/numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6", size = 6739342, upload-time = "2025-10-15T16:16:43.777Z" }, + { url = "https://files.pythonhosted.org/packages/34/f1/4de9586d05b1962acdcdb1dc4af6646361a643f8c864cef7c852bf509740/numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7", size = 14354338, upload-time = "2025-10-15T16:16:46.081Z" }, + { url = "https://files.pythonhosted.org/packages/1f/06/1c16103b425de7969d5a76bdf5ada0804b476fed05d5f9e17b777f1cbefd/numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0", size = 16702392, upload-time = "2025-10-15T16:16:48.455Z" }, + { url = "https://files.pythonhosted.org/packages/34/b2/65f4dc1b89b5322093572b6e55161bb42e3e0487067af73627f795cc9d47/numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f", size = 16134998, upload-time = "2025-10-15T16:16:51.114Z" }, + { url = "https://files.pythonhosted.org/packages/d4/11/94ec578896cdb973aaf56425d6c7f2aff4186a5c00fac15ff2ec46998b46/numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64", size = 18651574, upload-time = "2025-10-15T16:16:53.429Z" }, + { url = "https://files.pythonhosted.org/packages/62/b7/7efa763ab33dbccf56dade36938a77345ce8e8192d6b39e470ca25ff3cd0/numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb", size = 6413135, upload-time = "2025-10-15T16:16:55.992Z" }, + { url = "https://files.pythonhosted.org/packages/43/70/aba4c38e8400abcc2f345e13d972fb36c26409b3e644366db7649015f291/numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c", size = 12928582, upload-time = "2025-10-15T16:16:57.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/63/871fad5f0073fc00fbbdd7232962ea1ac40eeaae2bba66c76214f7954236/numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40", size = 10266691, upload-time = "2025-10-15T16:17:00.048Z" }, + { url = "https://files.pythonhosted.org/packages/72/71/ae6170143c115732470ae3a2d01512870dd16e0953f8a6dc89525696069b/numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e", size = 20955580, upload-time = "2025-10-15T16:17:02.509Z" }, + { url = "https://files.pythonhosted.org/packages/af/39/4be9222ffd6ca8a30eda033d5f753276a9c3426c397bb137d8e19dedd200/numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff", size = 14188056, upload-time = "2025-10-15T16:17:04.873Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3d/d85f6700d0a4aa4f9491030e1021c2b2b7421b2b38d01acd16734a2bfdc7/numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f", size = 5116555, upload-time = "2025-10-15T16:17:07.499Z" }, + { url = "https://files.pythonhosted.org/packages/bf/04/82c1467d86f47eee8a19a464c92f90a9bb68ccf14a54c5224d7031241ffb/numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b", size = 6643581, upload-time = "2025-10-15T16:17:09.774Z" }, + { url = "https://files.pythonhosted.org/packages/0c/d3/c79841741b837e293f48bd7db89d0ac7a4f2503b382b78a790ef1dc778a5/numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7", size = 14299186, upload-time = "2025-10-15T16:17:11.937Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7e/4a14a769741fbf237eec5a12a2cbc7a4c4e061852b6533bcb9e9a796c908/numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2", size = 16638601, upload-time = "2025-10-15T16:17:14.391Z" }, + { url = "https://files.pythonhosted.org/packages/93/87/1c1de269f002ff0a41173fe01dcc925f4ecff59264cd8f96cf3b60d12c9b/numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52", size = 16074219, upload-time = "2025-10-15T16:17:17.058Z" }, + { url = "https://files.pythonhosted.org/packages/cd/28/18f72ee77408e40a76d691001ae599e712ca2a47ddd2c4f695b16c65f077/numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26", size = 18576702, upload-time = "2025-10-15T16:17:19.379Z" }, + { url = "https://files.pythonhosted.org/packages/c3/76/95650169b465ececa8cf4b2e8f6df255d4bf662775e797ade2025cc51ae6/numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc", size = 6337136, upload-time = "2025-10-15T16:17:22.886Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/a231a5c43ede5d6f77ba4a91e915a87dea4aeea76560ba4d2bf185c683f0/numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9", size = 12920542, upload-time = "2025-10-15T16:17:24.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/0c/ae9434a888f717c5ed2ff2393b3f344f0ff6f1c793519fa0c540461dc530/numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868", size = 10480213, upload-time = "2025-10-15T16:17:26.935Z" }, + { url = "https://files.pythonhosted.org/packages/83/4b/c4a5f0841f92536f6b9592694a5b5f68c9ab37b775ff342649eadf9055d3/numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec", size = 21052280, upload-time = "2025-10-15T16:17:29.638Z" }, + { url = "https://files.pythonhosted.org/packages/3e/80/90308845fc93b984d2cc96d83e2324ce8ad1fd6efea81b324cba4b673854/numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3", size = 14302930, upload-time = "2025-10-15T16:17:32.384Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4e/07439f22f2a3b247cec4d63a713faae55e1141a36e77fb212881f7cda3fb/numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365", size = 5231504, upload-time = "2025-10-15T16:17:34.515Z" }, + { url = "https://files.pythonhosted.org/packages/ab/de/1e11f2547e2fe3d00482b19721855348b94ada8359aef5d40dd57bfae9df/numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252", size = 6739405, upload-time = "2025-10-15T16:17:36.128Z" }, + { url = "https://files.pythonhosted.org/packages/3b/40/8cd57393a26cebe2e923005db5134a946c62fa56a1087dc7c478f3e30837/numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e", size = 14354866, upload-time = "2025-10-15T16:17:38.884Z" }, + { url = "https://files.pythonhosted.org/packages/93/39/5b3510f023f96874ee6fea2e40dfa99313a00bf3ab779f3c92978f34aace/numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0", size = 16703296, upload-time = "2025-10-15T16:17:41.564Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/19bb163617c8045209c1996c4e427bccbc4bbff1e2c711f39203c8ddbb4a/numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0", size = 16136046, upload-time = "2025-10-15T16:17:43.901Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c1/6dba12fdf68b02a21ac411c9df19afa66bed2540f467150ca64d246b463d/numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f", size = 18652691, upload-time = "2025-10-15T16:17:46.247Z" }, + { url = "https://files.pythonhosted.org/packages/f8/73/f85056701dbbbb910c51d846c58d29fd46b30eecd2b6ba760fc8b8a1641b/numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d", size = 6485782, upload-time = "2025-10-15T16:17:48.872Z" }, + { url = "https://files.pythonhosted.org/packages/17/90/28fa6f9865181cb817c2471ee65678afa8a7e2a1fb16141473d5fa6bacc3/numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6", size = 13113301, upload-time = "2025-10-15T16:17:50.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/23/08c002201a8e7e1f9afba93b97deceb813252d9cfd0d3351caed123dcf97/numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29", size = 10547532, upload-time = "2025-10-15T16:17:53.48Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b6/64898f51a86ec88ca1257a59c1d7fd077b60082a119affefcdf1dd0df8ca/numpy-2.3.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6e274603039f924c0fe5cb73438fa9246699c78a6df1bd3decef9ae592ae1c05", size = 21131552, upload-time = "2025-10-15T16:17:55.845Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4c/f135dc6ebe2b6a3c77f4e4838fa63d350f85c99462012306ada1bd4bc460/numpy-2.3.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d149aee5c72176d9ddbc6803aef9c0f6d2ceeea7626574fc68518da5476fa346", size = 14377796, upload-time = "2025-10-15T16:17:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a4/f33f9c23fcc13dd8412fc8614559b5b797e0aba9d8e01dfa8bae10c84004/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:6d34ed9db9e6395bb6cd33286035f73a59b058169733a9db9f85e650b88df37e", size = 5306904, upload-time = "2025-10-15T16:18:00.596Z" }, + { url = "https://files.pythonhosted.org/packages/28/af/c44097f25f834360f9fb960fa082863e0bad14a42f36527b2a121abdec56/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:fdebe771ca06bb8d6abce84e51dca9f7921fe6ad34a0c914541b063e9a68928b", size = 6819682, upload-time = "2025-10-15T16:18:02.32Z" }, + { url = "https://files.pythonhosted.org/packages/c5/8c/cd283b54c3c2b77e188f63e23039844f56b23bba1712318288c13fe86baf/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e92defe6c08211eb77902253b14fe5b480ebc5112bc741fd5e9cd0608f847", size = 14422300, upload-time = "2025-10-15T16:18:04.271Z" }, + { url = "https://files.pythonhosted.org/packages/b0/f0/8404db5098d92446b3e3695cf41c6f0ecb703d701cb0b7566ee2177f2eee/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13b9062e4f5c7ee5c7e5be96f29ba71bc5a37fed3d1d77c37390ae00724d296d", size = 16760806, upload-time = "2025-10-15T16:18:06.668Z" }, + { url = "https://files.pythonhosted.org/packages/95/8e/2844c3959ce9a63acc7c8e50881133d86666f0420bcde695e115ced0920f/numpy-2.3.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:81b3a59793523e552c4a96109dde028aa4448ae06ccac5a76ff6532a85558a7f", size = 12973130, upload-time = "2025-10-15T16:18:09.397Z" }, ] [[package]] @@ -1022,6 +1242,61 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/28/01/d6b274a0635be0468d4dbd9cafe80c47105937a0d42434e805e67cd2ed8b/orjson-3.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:e8f6a7a27d7b7bec81bd5924163e9af03d49bbb63013f107b48eb5d16db711bc", size = 125985, upload-time = "2025-08-26T17:46:16.67Z" }, ] +[[package]] +name = "ormsgpack" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/f8/224c342c0e03e131aaa1a1f19aa2244e167001783a433f4eed10eedd834b/ormsgpack-1.11.0.tar.gz", hash = "sha256:7c9988e78fedba3292541eb3bb274fa63044ef4da2ddb47259ea70c05dee4206", size = 49357, upload-time = "2025-10-08T17:29:15.621Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/3d/6996193cb2babc47fc92456223bef7d141065357ad4204eccf313f47a7b3/ormsgpack-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = 
"sha256:03d4e658dd6e1882a552ce1d13cc7b49157414e7d56a4091fbe7823225b08cba", size = 367965, upload-time = "2025-10-08T17:28:06.736Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/c83b805dd9caebb046f4ceeed3706d0902ed2dbbcf08b8464e89f2c52e05/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67eb913c2b703f0ed39607fc56e50724dd41f92ce080a586b4d6149eb3fe4", size = 195209, upload-time = "2025-10-08T17:28:08.395Z" }, + { url = "https://files.pythonhosted.org/packages/3a/17/427d9c4f77b120f0af01d7a71d8144771c9388c2a81f712048320e31353b/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e54175b92411f73a238e5653a998627f6660de3def37d9dd7213e0fd264ca56", size = 205868, upload-time = "2025-10-08T17:28:09.688Z" }, + { url = "https://files.pythonhosted.org/packages/82/32/a9ce218478bdbf3fee954159900e24b314ab3064f7b6a217ccb1e3464324/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca2b197f4556e1823d1319869d4c5dc278be335286d2308b0ed88b59a5afcc25", size = 207391, upload-time = "2025-10-08T17:28:11.031Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d3/4413fe7454711596fdf08adabdfa686580e4656702015108e4975f00a022/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc62388262f58c792fe1e450e1d9dbcc174ed2fb0b43db1675dd7c5ff2319d6a", size = 377078, upload-time = "2025-10-08T17:28:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ad/13fae555a45e35ca1ca929a27c9ee0a3ecada931b9d44454658c543f9b9c/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c48bc10af74adfbc9113f3fb160dc07c61ad9239ef264c17e449eba3de343dc2", size = 470776, upload-time = "2025-10-08T17:28:13.484Z" }, + { url = "https://files.pythonhosted.org/packages/36/60/51178b093ffc4e2ef3381013a67223e7d56224434fba80047249f4a84b26/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a608d3a1d4fa4acdc5082168a54513cff91f47764cef435e81a483452f5f7647", size = 380862, upload-time = "2025-10-08T17:28:14.747Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e3/1cb6c161335e2ae7d711ecfb007a31a3936603626e347c13e5e53b7c7cf8/ormsgpack-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:97217b4f7f599ba45916b9c4c4b1d5656e8e2a4d91e2e191d72a7569d3c30923", size = 112058, upload-time = "2025-10-08T17:28:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/90164d00e8e94b48eff8a17bc2f4be6b71ae356a00904bc69d5e8afe80fb/ormsgpack-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c7be823f47d8e36648d4bc90634b93f02b7d7cc7480081195f34767e86f181fb", size = 367964, upload-time = "2025-10-08T17:28:16.778Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c2/fb6331e880a3446c1341e72c77bd5a46da3e92a8e2edf7ea84a4c6c14fff/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68accf15d1b013812755c0eb7a30e1fc2f81eb603a1a143bf0cda1b301cfa797", size = 195209, upload-time = "2025-10-08T17:28:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/50/4943fb5df8cc02da6b7b1ee2c2a7fb13aebc9f963d69280b1bb02b1fb178/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:805d06fb277d9a4e503c0c707545b49cde66cbb2f84e5cf7c58d81dfc20d8658", size = 205869, upload-time = "2025-10-08T17:28:19.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/fa/e7e06835bfea9adeef43915143ce818098aecab0cbd3df584815adf3e399/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1e57cdf003e77acc43643bda151dc01f97147a64b11cdee1380bb9698a7601c", size = 207391, upload-time = "2025-10-08T17:28:20.352Z" }, + { url = "https://files.pythonhosted.org/packages/33/f0/f28a19e938a14ec223396e94f4782fbcc023f8c91f2ab6881839d3550f32/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:37fc05bdaabd994097c62e2f3e08f66b03f856a640ede6dc5ea340bd15b77f4d", size = 377081, upload-time = "2025-10-08T17:28:21.926Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e3/73d1d7287637401b0b6637e30ba9121e1aa1d9f5ea185ed9834ca15d512c/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a6e9db6c73eb46b2e4d97bdffd1368a66f54e6806b563a997b19c004ef165e1d", size = 470779, upload-time = "2025-10-08T17:28:22.993Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/7ba7f9721e766dd0dfe4cedf444439447212abffe2d2f4538edeeec8ccbd/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9c44eae5ac0196ffc8b5ed497c75511056508f2303fa4d36b208eb820cf209e", size = 380865, upload-time = "2025-10-08T17:28:24.012Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7d/bb92a0782bbe0626c072c0320001410cf3f6743ede7dc18f034b1a18edef/ormsgpack-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:11d0dfaf40ae7c6de4f7dbd1e4892e2e6a55d911ab1774357c481158d17371e4", size = 112058, upload-time = "2025-10-08T17:28:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/28/1a/f07c6f74142815d67e1d9d98c5b2960007100408ade8242edac96d5d1c73/ormsgpack-1.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:0c63a3f7199a3099c90398a1bdf0cb577b06651a442dc5efe67f2882665e5b02", size = 105894, upload-time = "2025-10-08T17:28:25.93Z" }, + { url = "https://files.pythonhosted.org/packages/1e/16/2805ebfb3d2cbb6c661b5fae053960fc90a2611d0d93e2207e753e836117/ormsgpack-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3434d0c8d67de27d9010222de07fb6810fb9af3bb7372354ffa19257ac0eb83b", size = 368474, upload-time = "2025-10-08T17:28:27.532Z" }, + { url = "https://files.pythonhosted.org/packages/6f/39/6afae47822dca0ce4465d894c0bbb860a850ce29c157882dbdf77a5dd26e/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2da5bd097e8dbfa4eb0d4ccfe79acd6f538dee4493579e2debfe4fc8f4ca89b", size = 195321, upload-time = "2025-10-08T17:28:28.573Z" }, + { url = "https://files.pythonhosted.org/packages/f6/54/11eda6b59f696d2f16de469bfbe539c9f469c4b9eef5a513996b5879c6e9/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fdbaa0a5a8606a486960b60c24f2d5235d30ac7a8b98eeaea9854bffef14dc3d", size = 206036, upload-time = "2025-10-08T17:28:29.785Z" }, + { url = "https://files.pythonhosted.org/packages/1e/86/890430f704f84c4699ddad61c595d171ea2fd77a51fbc106f83981e83939/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3682f24f800c1837017ee90ce321086b2cbaef88db7d4cdbbda1582aa6508159", size = 207615, upload-time = "2025-10-08T17:28:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b9/77383e16c991c0ecb772205b966fc68d9c519e0b5f9c3913283cbed30ffe/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fcca21202bb05ccbf3e0e92f560ee59b9331182e4c09c965a28155efbb134993", size = 377195, upload-time = 
"2025-10-08T17:28:32.436Z" }, + { url = "https://files.pythonhosted.org/packages/20/e2/15f9f045d4947f3c8a5e0535259fddf027b17b1215367488b3565c573b9d/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c30e5c4655ba46152d722ec7468e8302195e6db362ec1ae2c206bc64f6030e43", size = 470960, upload-time = "2025-10-08T17:28:33.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/403ce188c4c495bc99dff921a0ad3d9d352dd6d3c4b629f3638b7f0cf79b/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7138a341f9e2c08c59368f03d3be25e8b87b3baaf10d30fb1f6f6b52f3d47944", size = 381174, upload-time = "2025-10-08T17:28:34.781Z" }, + { url = "https://files.pythonhosted.org/packages/14/a8/94c94bc48c68da4374870a851eea03fc5a45eb041182ad4c5ed9acfc05a4/ormsgpack-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4bd8589b78a11026d47f4edf13c1ceab9088bb12451f34396afe6497db28a27", size = 112314, upload-time = "2025-10-08T17:28:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/19/d0/aa4cf04f04e4cc180ce7a8d8ddb5a7f3af883329cbc59645d94d3ba157a5/ormsgpack-1.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:e5e746a1223e70f111d4001dab9585ac8639eee8979ca0c8db37f646bf2961da", size = 106072, upload-time = "2025-10-08T17:28:37.518Z" }, + { url = "https://files.pythonhosted.org/packages/8b/35/e34722edb701d053cf2240f55974f17b7dbfd11fdef72bd2f1835bcebf26/ormsgpack-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e7b36ab7b45cb95217ae1f05f1318b14a3e5ef73cb00804c0f06233f81a14e8", size = 368502, upload-time = "2025-10-08T17:28:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6a/c2fc369a79d6aba2aa28c8763856c95337ac7fcc0b2742185cd19397212a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43402d67e03a9a35cc147c8c03f0c377cad016624479e1ee5b879b8425551484", size = 195344, upload-time = "2025-10-08T17:28:39.554Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6a/0f8e24b7489885534c1a93bdba7c7c434b9b8638713a68098867db9f254c/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:64fd992f932764d6306b70ddc755c1bc3405c4c6a69f77a36acf7af1c8f5ada4", size = 206045, upload-time = "2025-10-08T17:28:40.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/71/8b460ba264f3c6f82ef5b1920335720094e2bd943057964ce5287d6df83a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0362fb7fe4a29c046c8ea799303079a09372653a1ce5a5a588f3bbb8088368d0", size = 207641, upload-time = "2025-10-08T17:28:41.736Z" }, + { url = "https://files.pythonhosted.org/packages/50/cf/f369446abaf65972424ed2651f2df2b7b5c3b735c93fc7fa6cfb81e34419/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:de2f7a65a9d178ed57be49eba3d0fc9b833c32beaa19dbd4ba56014d3c20b152", size = 377211, upload-time = "2025-10-08T17:28:43.12Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3f/948bb0047ce0f37c2efc3b9bb2bcfdccc61c63e0b9ce8088d4903ba39dcf/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f38cfae95461466055af966fc922d06db4e1654966385cda2828653096db34da", size = 470973, upload-time = "2025-10-08T17:28:44.465Z" }, + { url = "https://files.pythonhosted.org/packages/31/a4/92a8114d1d017c14aaa403445060f345df9130ca532d538094f38e535988/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c88396189d238f183cea7831b07a305ab5c90d6d29b53288ae11200bd956357b", size = 381161, upload-time = 
"2025-10-08T17:28:46.063Z" }, + { url = "https://files.pythonhosted.org/packages/d0/64/5b76447da654798bfcfdfd64ea29447ff2b7f33fe19d0e911a83ad5107fc/ormsgpack-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:5403d1a945dd7c81044cebeca3f00a28a0f4248b33242a5d2d82111628043725", size = 112321, upload-time = "2025-10-08T17:28:47.393Z" }, + { url = "https://files.pythonhosted.org/packages/46/5e/89900d06db9ab81e7ec1fd56a07c62dfbdcda398c435718f4252e1dc52a0/ormsgpack-1.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c57357b8d43b49722b876edf317bdad9e6d52071b523fdd7394c30cd1c67d5a0", size = 106084, upload-time = "2025-10-08T17:28:48.305Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0b/c659e8657085c8c13f6a0224789f422620cef506e26573b5434defe68483/ormsgpack-1.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d390907d90fd0c908211592c485054d7a80990697ef4dff4e436ac18e1aab98a", size = 368497, upload-time = "2025-10-08T17:28:49.297Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0e/451e5848c7ed56bd287e8a2b5cb5926e54466f60936e05aec6cb299f9143/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6153c2e92e789509098e04c9aa116b16673bd88ec78fbe0031deeb34ab642d10", size = 195385, upload-time = "2025-10-08T17:28:50.314Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/90f78cbbe494959f2439c2ec571f08cd3464c05a6a380b0d621c622122a9/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2b2c2a065a94d742212b2018e1fecd8f8d72f3c50b53a97d1f407418093446d", size = 206114, upload-time = "2025-10-08T17:28:51.336Z" }, + { url = "https://files.pythonhosted.org/packages/fb/db/34163f4c0923bea32dafe42cd878dcc66795a3e85669bc4b01c1e2b92a7b/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110e65b5340f3d7ef8b0009deae3c6b169437e6b43ad5a57fd1748085d29d2ac", size = 207679, upload-time = "2025-10-08T17:28:53.627Z" }, + { url = "https://files.pythonhosted.org/packages/b6/14/04ee741249b16f380a9b4a0cc19d4134d0b7c74bab27a2117da09e525eb9/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c27e186fca96ab34662723e65b420919910acbbc50fc8e1a44e08f26268cb0e0", size = 377237, upload-time = "2025-10-08T17:28:56.12Z" }, + { url = "https://files.pythonhosted.org/packages/89/ff/53e588a6aaa833237471caec679582c2950f0e7e1a8ba28c1511b465c1f4/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d56b1f877c13d499052d37a3db2378a97d5e1588d264f5040b3412aee23d742c", size = 471021, upload-time = "2025-10-08T17:28:57.299Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f9/f20a6d9ef2be04da3aad05e8f5699957e9a30c6d5c043a10a296afa7e890/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c88e28cd567c0a3269f624b4ade28142d5e502c8e826115093c572007af5be0a", size = 381205, upload-time = "2025-10-08T17:28:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/64/96c07d084b479ac8b7821a77ffc8d3f29d8b5c95ebfdf8db1c03dff02762/ormsgpack-1.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:8811160573dc0a65f62f7e0792c4ca6b7108dfa50771edb93f9b84e2d45a08ae", size = 112374, upload-time = "2025-10-08T17:29:00Z" }, + { url = "https://files.pythonhosted.org/packages/88/a5/5dcc18b818d50213a3cadfe336bb6163a102677d9ce87f3d2f1a1bee0f8c/ormsgpack-1.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:23e30a8d3c17484cf74e75e6134322255bd08bc2b5b295cc9c442f4bae5f3c2d", size = 106056, upload-time = "2025-10-08T17:29:01.29Z" }, 
+ { url = "https://files.pythonhosted.org/packages/19/2b/776d1b411d2be50f77a6e6e94a25825cca55dcacfe7415fd691a144db71b/ormsgpack-1.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2905816502adfaf8386a01dd85f936cd378d243f4f5ee2ff46f67f6298dc90d5", size = 368661, upload-time = "2025-10-08T17:29:02.382Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0c/81a19e6115b15764db3d241788f9fac093122878aaabf872cc545b0c4650/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c04402fb9a0a9b9f18fbafd6d5f8398ee99b3ec619fb63952d3a954bc9d47daa", size = 195539, upload-time = "2025-10-08T17:29:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/97/86/e5b50247a61caec5718122feb2719ea9d451d30ac0516c288c1dbc6408e8/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a025ec07ac52056ecfd9e57b5cbc6fff163f62cb9805012b56cda599157f8ef2", size = 207718, upload-time = "2025-10-08T17:29:04.545Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -1051,91 +1326,116 @@ wheels = [ [[package]] name = "propcache" -version = "0.3.2" +version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", size = 73178, upload-time = "2025-06-09T22:53:40.126Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3", size = 43133, upload-time = "2025-06-09T22:53:41.965Z" }, - { url = "https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3", size = 43039, upload-time = "2025-06-09T22:53:43.268Z" }, - { url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e", size = 201903, upload-time = "2025-06-09T22:53:44.872Z" }, - { url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220", size = 213362, upload-time = "2025-06-09T22:53:46.707Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb", size = 210525, upload-time = "2025-06-09T22:53:48.547Z" }, - { url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614", size = 198283, upload-time = "2025-06-09T22:53:50.067Z" }, - { url = "https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50", size = 191872, upload-time = "2025-06-09T22:53:51.438Z" }, - { url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339", size = 199452, upload-time = "2025-06-09T22:53:53.229Z" }, - { url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0", size = 191567, upload-time = "2025-06-09T22:53:54.541Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2", size = 193015, upload-time = "2025-06-09T22:53:56.44Z" }, - { url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7", size = 204660, upload-time = "2025-06-09T22:53:57.839Z" }, - { url = "https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b", size = 206105, upload-time = "2025-06-09T22:53:59.638Z" }, - { url = "https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c", size = 196980, upload-time = "2025-06-09T22:54:01.071Z" }, - { url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70", size = 37679, upload-time = "2025-06-09T22:54:03.003Z" }, - { url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9", size = 41459, upload-time = "2025-06-09T22:54:04.134Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, - { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, - { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, - { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, - { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, - { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, - { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, - { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = 
"2025-06-09T22:54:21.716Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, - { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, - { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, - { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, - { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, - { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, - { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, - { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, - { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, - { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, - { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, - { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, - { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = 
"2025-06-09T22:55:10.62Z" }, - { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, - { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, - { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, - { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, - { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, - { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, - { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, - { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, - { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, - { url = 
"https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, - { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, - { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, - { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, - { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, - { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, - { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, - { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0e/934b541323035566a9af292dba85a195f7b78179114f2c6ebb24551118a9/propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db", size = 79534, upload-time = "2025-10-08T19:46:02.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8", size = 45526, upload-time = "2025-10-08T19:46:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925", size = 47263, upload-time = "2025-10-08T19:46:05.405Z" }, + { url = "https://files.pythonhosted.org/packages/df/1b/39313ddad2bf9187a1432654c38249bab4562ef535ef07f5eb6eb04d0b1b/propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21", size = 201012, upload-time = "2025-10-08T19:46:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/5b/01/f1d0b57d136f294a142acf97f4ed58c8e5b974c21e543000968357115011/propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5", size = 209491, upload-time = "2025-10-08T19:46:08.909Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c8/038d909c61c5bb039070b3fb02ad5cccdb1dde0d714792e251cdb17c9c05/propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db", size = 215319, upload-time = "2025-10-08T19:46:10.7Z" }, + { url = "https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7", size = 196856, upload-time = "2025-10-08T19:46:12.003Z" }, + { url = "https://files.pythonhosted.org/packages/42/df/5615fec76aa561987a534759b3686008a288e73107faa49a8ae5795a9f7a/propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4", size = 193241, upload-time = "2025-10-08T19:46:13.495Z" }, + { url = "https://files.pythonhosted.org/packages/d5/21/62949eb3a7a54afe8327011c90aca7e03547787a88fb8bd9726806482fea/propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60", size = 190552, upload-time = "2025-10-08T19:46:14.938Z" }, + { url = "https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f", size = 200113, upload-time = "2025-10-08T19:46:16.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900", size = 200778, upload-time = "2025-10-08T19:46:18.023Z" }, + { url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c", size = 193047, upload-time = "2025-10-08T19:46:19.449Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb", size = 38093, upload-time = "2025-10-08T19:46:20.643Z" }, + { url = "https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37", size = 41638, upload-time = "2025-10-08T19:46:21.935Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581", size = 38229, upload-time = "2025-10-08T19:46:23.368Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, 
upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 
44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = 
"sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] @@ -1158,7 +1458,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1166,96 +1466,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/35/d319ed522433215526689bad428a94058b6dd12190ce7ddd78618ac14b28/pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd", size = 816358, upload-time = "2025-10-14T15:02:21.842Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = 
"sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/6c/98/468cb649f208a6f1279448e6e5247b37ae79cf5e4041186f1e2ef3d16345/pydantic-2.12.2-py3-none-any.whl", hash = "sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae", size = 460628, upload-time = "2025-10-14T15:02:19.623Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, 
- { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = 
"2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = 
"2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = 
"2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, 
upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = 
"sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, + { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = 
"sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size 
= 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, ] [[package]] @@ -1311,24 +1638,24 @@ wheels = [ [[package]] name = "pytest-codspeed" -version = "4.0.0" +version = "4.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, { name = "pytest" }, { name = "rich" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/13/4989d50a3d6de9fb91de23f3b6ffce7c704f23516d308138242325a7c857/pytest_codspeed-4.0.0.tar.gz", hash = "sha256:0e9af08ca93ad897b376771db92693a81aa8990eecc2a778740412e00a6f6eaf", size = 107630, upload-time = "2025-07-10T08:37:53.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/ae/5d89a787151868d3ebd813b24a13fe4ac7e60148cf1b3454d351c8b99f69/pytest_codspeed-4.1.1.tar.gz", hash = "sha256:9acc3394cc8aafd4543193254831d87de6be79accfdbd43475919fdaa2fc8d81", size = 113149, upload-time = "2025-10-07T16:30:02.709Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/c9/c7116338e04d1c6bb43277c5f938fa7e5eb1df54b4cc0c298a428995296b/pytest_codspeed-4.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2517731b20a6aa9fe61d04822b802e1637ee67fd865189485b384a9d5897117f", size = 230409, upload-time = "2025-07-10T08:37:40.83Z" }, - { url = "https://files.pythonhosted.org/packages/e6/00/c21b0e2863c967c8d4dfa5bebdc5f0f2a9d6ab1cc7a39e111faf70a5880d/pytest_codspeed-4.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e5076bb5119d4f8248822b5cd6b768f70a18c7e1a7fbcd96a99cd4a6430096e", size = 221135, upload-time = "2025-07-10T08:37:42.255Z" }, - { url = "https://files.pythonhosted.org/packages/7f/e7/16b0f347fd910f2cc50e858094c17744d640e5ae71926c2c0ad762ecb7ec/pytest_codspeed-4.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06b324acdfe2076a0c97a9d31e8645f820822d6f0e766c73426767ff887a9381", size = 230418, upload-time = "2025-07-10T08:37:43.602Z" }, - { url = "https://files.pythonhosted.org/packages/47/a7/2b3ac30e1e2b326abf370c8a6b4ed48a43d3a5491def7aaf67f7fbab5d6f/pytest_codspeed-4.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ebdac1a4d6138e1ca4f5391e7e3cafad6e3aa6d5660d1b243871b691bc1396c", size = 221131, upload-time = "2025-07-10T08:37:44.708Z" }, - { url = "https://files.pythonhosted.org/packages/11/e4/a9591949783cdea60d5f2a215d89c3e17af7b068f2613e38b1d46cb5b8e9/pytest_codspeed-4.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f3def79d4072867d038a33e7f35bc7fb1a2a75236a624b3a690c5540017cb38", size = 230601, upload-time = "2025-07-10T08:37:46.018Z" }, - { url = "https://files.pythonhosted.org/packages/16/fe/22caa7cfb6717d21ba14ffd3c0b013b2143a4c32225715f401489f6c32bc/pytest_codspeed-4.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01d29d4538c2d111c0034f71811bcce577304506d22af4dd65df87fadf3ab495", size = 221230, upload-time = "2025-07-10T08:37:46.997Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/e2/0a2e703301f7560a456e343e1b31d01a2ddee96807db5ded65951bfa5b7a/pytest_codspeed-4.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90894c93c9e23f12487b7fdf16c28da8f6275d565056772072beb41a72a54cf9", size = 230591, upload-time = "2025-07-10T08:37:47.961Z" }, - { url = "https://files.pythonhosted.org/packages/17/fc/5fee0bcdada8ecb5a89088cd84af7e094652fc94bf414a96b49a874fd8be/pytest_codspeed-4.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:79e9c40852fa7fc76776db4f1d290eceaeee2d6c5d2dc95a66c7cc690d83889e", size = 221227, upload-time = "2025-07-10T08:37:49.113Z" }, - { url = "https://files.pythonhosted.org/packages/5f/e4/e3ddab5fd04febf6189d71bfa4ba2d7c05adaa7d692a6d6b1e8ed68de12d/pytest_codspeed-4.0.0-py3-none-any.whl", hash = "sha256:c5debd4b127dc1c507397a8304776f52cabbfa53aad6f51eae329a5489df1e06", size = 107084, upload-time = "2025-07-10T08:37:52.65Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b4/6e76539ef17e398c06464c6d92da1b8978b46180df5446cdb76e2293c254/pytest_codspeed-4.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa83a1a0aaeb6bdb9918a18294708eebe765a3b5a855adccf9213629d2a0d302", size = 261944, upload-time = "2025-10-07T16:29:50.024Z" }, + { url = "https://files.pythonhosted.org/packages/43/6a/ba8aca4a07bb8f19fdeba752c7276b35cb63ff743695b137c2f68a53cf21/pytest_codspeed-4.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e6fe213b2589ffe6f2189b3b21ca14717c9346b226e6028d2e2b4d4d7dac750f", size = 249258, upload-time = "2025-10-07T16:29:52.694Z" }, + { url = "https://files.pythonhosted.org/packages/5e/13/be5ab2167d7fb477ba86061dfb6dee3b6feb698ef7d35e8912ba0cce94d5/pytest_codspeed-4.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94b3bd5a71bfab4478e9a9b5058237cf2b34938570b43495093c2ea213175bd5", size = 261978, upload-time = "2025-10-07T16:29:53.671Z" }, + { url = "https://files.pythonhosted.org/packages/43/7f/60445f5e9bdaff4b9da1d7e4964e34d87bc160867aec2d99c1119e38b3f8/pytest_codspeed-4.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfc1efdbcc92fb4b4cbc8eaa8d7387664b063c17e025985ece4816100f1fff29", size = 249269, upload-time = "2025-10-07T16:29:54.907Z" }, + { url = "https://files.pythonhosted.org/packages/6e/bf/b1d78c982d575636b38688f284bfd6836f5f232d95cef661a6345a91cc7b/pytest_codspeed-4.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:506d446d2911e5188aca7be702c2850a9b8680a72ed241a633d7edaeef00ac13", size = 262202, upload-time = "2025-10-07T16:29:55.792Z" }, + { url = "https://files.pythonhosted.org/packages/45/23/9ace1fcd72a84d845f522e06badada7d85f6a5a4d4aed54509b51af87068/pytest_codspeed-4.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1773c74394c98317c6846e9eb60c352222c031bdf1ded109f5c35772a3ce6dc2", size = 249554, upload-time = "2025-10-07T16:29:56.681Z" }, + { url = "https://files.pythonhosted.org/packages/da/45/09fa57357d46e46de968554cd0a20e4a8b2a48971fec9564c215c67ebcde/pytest_codspeed-4.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f8af528f7f950cb745971fc1e9f59ebc52cc4c51a7eac7a931577fd55d21b94", size = 262209, upload-time = 
"2025-10-07T16:29:57.659Z" }, + { url = "https://files.pythonhosted.org/packages/65/e5/ac697d8e249be059d4bec9ebcb22a5626e0d18fd6944665b6d0edca3a508/pytest_codspeed-4.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:db8b2b71cabde1a7ae77a29a3ce67bcb852c28d5599b4eb7428fdb26cd067815", size = 249559, upload-time = "2025-10-07T16:29:58.58Z" }, + { url = "https://files.pythonhosted.org/packages/31/b0/6f0502ca6497e4c53f2f425b57854b759f29d186804a64d090d9ac2878ca/pytest_codspeed-4.1.1-py3-none-any.whl", hash = "sha256:a0a7aa318b09d87541f4f65db9cd473b53d4f1589598d883b238fe208ae2ac8b", size = 113601, upload-time = "2025-10-07T16:30:01.58Z" }, ] [[package]] @@ -1405,6 +1732,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5b/3a/c44a76c6bb5e9e896d9707fb1c704a31a0136950dec9514373ced0684d56/pytest_watcher-0.4.3-py3-none-any.whl", hash = "sha256:d59b1e1396f33a65ea4949b713d6884637755d641646960056a90b267c3460f9", size = 11852, upload-time = "2024-08-28T17:37:45.731Z" }, ] +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1510,15 +1850,15 @@ wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "14.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, ] [[package]] @@ -1588,41 +1928,51 @@ wheels = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, - { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, ] [[package]] @@ -1792,103 +2142,248 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = 
"2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = "https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = "2025-10-02T14:34:11.618Z" }, + { url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = 
"2025-10-02T14:34:53.276Z" }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = 
"2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = 
"sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + [[package]] name = "yarl" -version = "1.20.1" +version = "1.22.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4", size = 132910, upload-time = "2025-06-10T00:42:31.108Z" }, - { url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a", size = 90644, upload-time = "2025-06-10T00:42:33.851Z" }, - { url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed", size = 89322, upload-time = "2025-06-10T00:42:35.688Z" }, - { url = "https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e", size = 323786, upload-time = "2025-06-10T00:42:37.817Z" }, - { url = "https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73", size = 319627, upload-time = "2025-06-10T00:42:39.937Z" }, - { url = "https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e", size = 339149, upload-time = "2025-06-10T00:42:42.627Z" }, - { url = "https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8", size = 333327, upload-time = "2025-06-10T00:42:44.842Z" }, - { url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23", size = 326054, upload-time = "2025-06-10T00:42:47.149Z" }, - { url = "https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70", size = 315035, upload-time = "2025-06-10T00:42:48.852Z" }, - { url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb", size = 338962, upload-time = "2025-06-10T00:42:51.024Z" }, - { url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2", size = 335399, upload-time = "2025-06-10T00:42:53.007Z" }, - { url = "https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30", size = 338649, upload-time = "2025-06-10T00:42:54.964Z" }, - { url = "https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309", size = 358563, upload-time = "2025-06-10T00:42:57.28Z" }, - { url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24", size = 357609, upload-time = "2025-06-10T00:42:59.055Z" }, - { url = "https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13", size = 350224, upload-time = "2025-06-10T00:43:01.248Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8", size = 81753, upload-time = "2025-06-10T00:43:03.486Z" }, - { url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16", size = 86817, upload-time = "2025-06-10T00:43:05.231Z" }, - { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, - { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = 
"2025-06-10T00:43:09.538Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, - { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, - { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, - { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, - { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, - { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, - { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, - { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, - { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, - { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, - { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, upload-time = "2025-06-10T00:43:36.489Z" }, - { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, - { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, - { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, - { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, - { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, - { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, - { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, - { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, - { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, - { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, - { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, - { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, - { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, - { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, - { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, - { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, - { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, - { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, - { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, - { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, 
upload-time = "2025-06-10T00:44:18.933Z" }, - { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, - { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, - { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, - { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, - { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, - { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, - { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, - { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, - { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, - { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, - { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, - { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, - { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, - { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, - { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, - { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, - { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, - { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, - { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, - { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, - { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, - { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", 
size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, - { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, - { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, - { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, - { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, - { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, - { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, - { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, - { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, - { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, - { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, + { url = "https://files.pythonhosted.org/packages/d1/43/a2204825342f37c337f5edb6637040fa14e365b2fcc2346960201d457579/yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e", size = 140517, upload-time = "2025-10-06T14:08:42.494Z" }, + { url = "https://files.pythonhosted.org/packages/44/6f/674f3e6f02266428c56f704cd2501c22f78e8b2eeb23f153117cc86fb28a/yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f", size = 93495, upload-time = "2025-10-06T14:08:46.2Z" }, + { url = "https://files.pythonhosted.org/packages/b8/12/5b274d8a0f30c07b91b2f02cba69152600b47830fcfb465c108880fcee9c/yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf", size = 94400, upload-time = "2025-10-06T14:08:47.855Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7f/df1b6949b1fa1aa9ff6de6e2631876ad4b73c4437822026e85d8acb56bb1/yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a", size = 347545, upload-time = "2025-10-06T14:08:49.683Z" }, + { url = "https://files.pythonhosted.org/packages/84/09/f92ed93bd6cd77872ab6c3462df45ca45cd058d8f1d0c9b4f54c1704429f/yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c", size = 319598, upload-time = "2025-10-06T14:08:51.215Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/ac3f3feae7d522cf7ccec3d340bb0b2b61c56cb9767923df62a135092c6b/yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147", size = 363893, upload-time = "2025-10-06T14:08:53.144Z" }, + { url = "https://files.pythonhosted.org/packages/06/49/f3219097403b9c84a4d079b1d7bda62dd9b86d0d6e4428c02d46ab2c77fc/yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb", size = 371240, upload-time = "2025-10-06T14:08:55.036Z" }, + { url = "https://files.pythonhosted.org/packages/35/9f/06b765d45c0e44e8ecf0fe15c9eacbbde342bb5b7561c46944f107bfb6c3/yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6", size = 346965, upload-time = "2025-10-06T14:08:56.722Z" }, + { url = "https://files.pythonhosted.org/packages/c5/69/599e7cea8d0fcb1694323b0db0dda317fa3162f7b90166faddecf532166f/yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0", size = 342026, upload-time = "2025-10-06T14:08:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/95/6f/9dfd12c8bc90fea9eab39832ee32ea48f8e53d1256252a77b710c065c89f/yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda", size = 335637, 
upload-time = "2025-10-06T14:09:00.506Z" }, + { url = "https://files.pythonhosted.org/packages/57/2e/34c5b4eb9b07e16e873db5b182c71e5f06f9b5af388cdaa97736d79dd9a6/yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc", size = 359082, upload-time = "2025-10-06T14:09:01.936Z" }, + { url = "https://files.pythonhosted.org/packages/31/71/fa7e10fb772d273aa1f096ecb8ab8594117822f683bab7d2c5a89914c92a/yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737", size = 357811, upload-time = "2025-10-06T14:09:03.445Z" }, + { url = "https://files.pythonhosted.org/packages/26/da/11374c04e8e1184a6a03cf9c8f5688d3e5cec83ed6f31ad3481b3207f709/yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467", size = 351223, upload-time = "2025-10-06T14:09:05.401Z" }, + { url = "https://files.pythonhosted.org/packages/82/8f/e2d01f161b0c034a30410e375e191a5d27608c1f8693bab1a08b089ca096/yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea", size = 82118, upload-time = "2025-10-06T14:09:11.148Z" }, + { url = "https://files.pythonhosted.org/packages/62/46/94c76196642dbeae634c7a61ba3da88cd77bed875bf6e4a8bed037505aa6/yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca", size = 86852, upload-time = "2025-10-06T14:09:12.958Z" }, + { url = "https://files.pythonhosted.org/packages/af/af/7df4f179d3b1a6dcb9a4bd2ffbc67642746fcafdb62580e66876ce83fff4/yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b", size = 82012, upload-time = "2025-10-06T14:09:14.664Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, + { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, + { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, + { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, + { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, + { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, + { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, + { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, + { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, + { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, + { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, 
upload-time = "2025-10-06T14:10:44.643Z" }, + { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, + { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, + { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, + { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, + { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, + { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, + { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, + { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, + { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, + { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, + { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, + { 
url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, + { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, + { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, + { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, + { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, + { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, + { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ] [[package]] diff --git a/libs/partners/chroma/README.md b/libs/partners/chroma/README.md index c060cdde6c6..960727432a0 100644 --- a/libs/partners/chroma/README.md +++ b/libs/partners/chroma/README.md @@ -1,5 +1,22 @@ # langchain-chroma +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-chroma?label=%20)](https://pypi.org/project/langchain-chroma/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-chroma)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-chroma)](https://pypistats.org/packages/langchain-chroma) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-chroma +``` + +## πŸ€” What is this? + This package contains the LangChain integration with Chroma. +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/chroma) for more details. diff --git a/libs/partners/chroma/langchain_chroma/vectorstores.py b/libs/partners/chroma/langchain_chroma/vectorstores.py index 94787621ae7..fd33edcb509 100644 --- a/libs/partners/chroma/langchain_chroma/vectorstores.py +++ b/libs/partners/chroma/langchain_chroma/vectorstores.py @@ -51,18 +51,25 @@ def _results_to_docs_and_scores(results: Any) -> list[tuple[Document, float]]: results["distances"][0], strict=False, ) + if result[0] is not None ] def _results_to_docs_and_vectors(results: Any) -> list[tuple[Document, np.ndarray]]: + """Convert ChromaDB results to documents and vectors, filtering out None content.""" return [ - (Document(page_content=result[0], metadata=result[1] or {}), result[2]) + ( + Document(page_content=result[0], metadata=result[1] or {}, id=result[3]), + result[2], + ) for result in zip( results["documents"][0], results["metadatas"][0], results["embeddings"][0], + results["ids"][0], strict=False, ) + if result[0] is not None ] @@ -110,11 +117,10 @@ def maximal_marginal_relevance( Args: query_embedding: Query embedding. 
embedding_list: List of embeddings to select from. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - k: Number of Documents to return. Defaults to 4. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. + k: Number of Documents to return. Returns: List of indices of embeddings selected by maximal marginal relevance. @@ -157,31 +163,31 @@ class Chroma(VectorStore): ``` Key init args β€” indexing params: - collection_name: str + collection_name: Name of the collection. - embedding_function: Embeddings + embedding_function: Embedding function to use. Key init args β€” client params: - client: Client | None + client: Chroma client to use. - client_settings: chromadb.config.Settings | None + client_settings: Chroma client settings. - persist_directory: str | None + persist_directory: Directory to persist the collection. - host: str | None + host: Hostname of a deployed Chroma server. - port: int | None + port: Connection port for a deployed Chroma server. Default is 8000. - ssl: bool | None + ssl: Whether to establish an SSL connection with a deployed Chroma server. Default is False. - headers: dict[str, str] | None + headers: HTTP headers to send to a deployed Chroma server. - chroma_cloud_api_key: str | None + chroma_cloud_api_key: Chroma Cloud API key. - tenant: str | None + tenant: Tenant ID. Required for Chroma Cloud connections. Default is 'default_tenant' for local Chroma servers. - database: str | None + database: Database name. Required for Chroma Cloud connections. Default is 'default_database'. Instantiate: @@ -332,7 +338,7 @@ class Chroma(VectorStore): client_settings: Chroma client settings collection_metadata: Collection configurations. collection_configuration: Index configuration for the collection. - Defaults to `None`. + client: Chroma client. Documentation: https://docs.trychroma.com/reference/python/client relevance_score_fn: Function to calculate relevance score from distance. @@ -455,7 +461,7 @@ class Chroma(VectorStore): Args: query_texts: List of query texts. query_embeddings: List of query embeddings. - n_results: Number of results to return. Defaults to 4. + n_results: Number of results to return. where: dict used to filter results by metadata. E.g. {"color" : "red"}. where_document: dict used to filter by the document contents. @@ -506,7 +512,7 @@ class Chroma(VectorStore): metadatas: list[dict] | None = None, ids: list[str] | None = None, ) -> list[str]: - """Run more images through the embeddings and add to the vectorstore. + """Run more images through the embeddings and add to the `VectorStore`. Args: uris: File path to the image. @@ -595,10 +601,10 @@ class Chroma(VectorStore): ids: list[str] | None = None, **kwargs: Any, ) -> list[str]: - """Run more texts through the embeddings and add to the vectorstore. + """Run more texts through the embeddings and add to the `VectorStore`. Args: - texts: Texts to add to the vectorstore. + texts: Texts to add to the `VectorStore`. metadatas: Optional list of metadatas. When querying, you can filter on this metadata. ids: Optional list of IDs. (Items without IDs will be assigned UUIDs) @@ -686,8 +692,8 @@ class Chroma(VectorStore): Args: query: Query text to search for. - k: Number of results to return. Defaults to 4. - filter: Filter by metadata. 
Defaults to `None`. + k: Number of results to return. + filter: Filter by metadata. kwargs: Additional keyword arguments to pass to Chroma collection query. Returns: @@ -713,14 +719,14 @@ class Chroma(VectorStore): Args: embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. where_document: dict used to filter by the document contents. E.g. {"$contains": "hello"}. kwargs: Additional keyword arguments to pass to Chroma collection query. Returns: - List of Documents most similar to the query vector. + List of `Document` objects most similar to the query vector. """ results = self.__query_collection( query_embeddings=[embedding], @@ -743,8 +749,8 @@ class Chroma(VectorStore): Args: embedding (List[float]): Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. where_document: dict used to filter by the documents. E.g. {"$contains": "hello"}. kwargs: Additional keyword arguments to pass to Chroma collection query. @@ -774,8 +780,8 @@ class Chroma(VectorStore): Args: query: Query text to search for. - k: Number of results to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of results to return. + filter: Filter by metadata. where_document: dict used to filter by document contents. E.g. {"$contains": "hello"}. kwargs: Additional keyword arguments to pass to Chroma collection query. @@ -816,8 +822,8 @@ class Chroma(VectorStore): Args: query: Query text to search for. - k: Number of results to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of results to return. + filter: Filter by metadata. where_document: dict used to filter by the document contents. E.g. {"$contains": "hello"}. kwargs: Additional keyword arguments to pass to Chroma collection query. @@ -988,20 +994,18 @@ class Chroma(VectorStore): Args: embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. Defaults to - 20. + k: Number of `Document` objects to return. + fetch_k: Number of `Document` objects to fetch to pass to MMR algorithm. lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter: Filter by metadata. Defaults to `None`. + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. + filter: Filter by metadata. where_document: dict used to filter by the document contents. - E.g. {"$contains": "hello"}. + e.g. `{"$contains": "hello"}`. kwargs: Additional keyword arguments to pass to Chroma collection query. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ results = self.__query_collection( query_embeddings=[embedding], @@ -1039,19 +1043,18 @@ class Chroma(VectorStore): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of Documents to return. fetch_k: Number of Documents to fetch to pass to MMR algorithm. 
- lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter: Filter by metadata. Defaults to `None`. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. + filter: Filter by metadata. where_document: dict used to filter by the document contents. - E.g. {"$contains": "hello"}. + e.g. `{"$contains": "hello"}`. kwargs: Additional keyword arguments to pass to Chroma collection query. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. Raises: ValueError: If the embedding function is not provided. @@ -1146,7 +1149,7 @@ class Chroma(VectorStore): ids: List of ids to retrieve. Returns: - List of Documents. + List of `Document` objects. !!! version-added "Added in 0.2.1" """ @@ -1159,6 +1162,7 @@ class Chroma(VectorStore): results["ids"], strict=False, ) + if doc is not None # Filter out documents with None page_content ] def update_document(self, document_id: str, document: Document) -> None: @@ -1262,15 +1266,15 @@ class Chroma(VectorStore): Default is 'default_tenant' for local Chroma servers. database: Database name. Required for Chroma Cloud connections. Default is 'default_database'. - embedding: Embedding function. Defaults to `None`. - metadatas: List of metadatas. Defaults to `None`. - ids: List of document IDs. Defaults to `None`. + embedding: Embedding function. + metadatas: List of metadatas. + ids: List of document IDs. client_settings: Chroma client settings. client: Chroma client. Documentation: https://docs.trychroma.com/reference/python/client - collection_metadata: Collection configurations. Defaults to `None`. + collection_metadata: Collection configurations. collection_configuration: Index configuration for the collection. - Defaults to `None`. + kwargs: Additional keyword arguments to initialize a Chroma client. Returns: @@ -1354,22 +1358,21 @@ class Chroma(VectorStore): host: Hostname of a deployed Chroma server. port: Connection port for a deployed Chroma server. Default is 8000. ssl: Whether to establish an SSL connection with a deployed Chroma server. - Default is False. headers: HTTP headers to send to a deployed Chroma server. chroma_cloud_api_key: Chroma Cloud API key. tenant: Tenant ID. Required for Chroma Cloud connections. Default is 'default_tenant' for local Chroma servers. database: Database name. Required for Chroma Cloud connections. Default is 'default_database'. - ids : List of document IDs. Defaults to `None`. - documents: List of documents to add to the vectorstore. - embedding: Embedding function. Defaults to `None`. + ids: List of document IDs. + documents: List of documents to add to the `VectorStore`. + embedding: Embedding function. client_settings: Chroma client settings. client: Chroma client. Documentation: https://docs.trychroma.com/reference/python/client - collection_metadata: Collection configurations. Defaults to `None`. + collection_metadata: Collection configurations. collection_configuration: Index configuration for the collection. - Defaults to `None`. + kwargs: Additional keyword arguments to initialize a Chroma client. 
Returns: diff --git a/libs/partners/chroma/pyproject.toml b/libs/partners/chroma/pyproject.toml index 4fa46860ef5..5c38cea29a7 100644 --- a/libs/partners/chroma/pyproject.toml +++ b/libs/partners/chroma/pyproject.toml @@ -7,24 +7,24 @@ authors = [] license = { text = "MIT" } requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "numpy>=1.26.0; python_version < '3.13'", "numpy>=2.1.0; python_version >= '3.13'", "chromadb>=1.0.20,<2.0.0", ] name = "langchain-chroma" -version = "1.0.0a1" +version = "1.0.0" description = "An integration package connecting Chroma and LangChain." readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/chroma" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/chroma" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-chroma%22" -docs = "https://reference.langchain.com/python/integrations/langchain_chroma/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/chroma" +Documentation = "https://reference.langchain.com/python/integrations/langchain_chroma/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/chroma" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-chroma%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ diff --git a/libs/partners/chroma/tests/integration_tests/test_vectorstores.py b/libs/partners/chroma/tests/integration_tests/test_vectorstores.py index 3c4b0ae75ff..548c1c04f1d 100644 --- a/libs/partners/chroma/tests/integration_tests/test_vectorstores.py +++ b/libs/partners/chroma/tests/integration_tests/test_vectorstores.py @@ -193,7 +193,10 @@ def test_chroma_with_metadatas_with_vectors() -> None: vec_1 = embeddings.embed_query(texts[0]) output = docsearch.similarity_search_with_vectors("foo", k=1) docsearch.delete_collection() - assert output[0][0] == Document(page_content="foo", metadata={"page": "0"}) + doc = output[0][0] + assert doc.page_content == "foo" + assert doc.metadata == {"page": "0"} + assert doc.id is not None assert (output[0][1] == vec_1).all() @@ -825,3 +828,44 @@ def test_delete_where_clause(client: chromadb.ClientAPI) -> None: assert vectorstore._collection.count() == 1 # Clean up vectorstore.delete_collection() + + +def test_chroma_handles_none_page_content() -> None: + """Test that Chroma gracefully handles None page_content values.""" + from langchain_chroma.vectorstores import _results_to_docs_and_scores + + mock_results = { + "documents": [["valid content", None, "another valid content"]], + "metadatas": [[{"key": "value1"}, {"key": "value2"}, {"key": "value3"}]], + "ids": [["id1", "id2", "id3"]], + "distances": [[0.1, 0.2, 0.3]], + } + + docs_and_scores = _results_to_docs_and_scores(mock_results) + + assert len(docs_and_scores) == 2 + assert docs_and_scores[0][0].page_content == "valid content" + assert docs_and_scores[1][0].page_content == "another valid content" + assert docs_and_scores[0][0].id == "id1" + assert docs_and_scores[1][0].id == "id3" + + +def test_chroma_handles_none_page_content_with_vectors() -> None: + """Test that Chroma gracefully handles None page_content values with vectors.""" + from 
langchain_chroma.vectorstores import _results_to_docs_and_vectors + + mock_results = { + "documents": [["valid content", None, "another valid content"]], + "metadatas": [[{"key": "value1"}, {"key": "value2"}, {"key": "value3"}]], + "ids": [["id1", "id2", "id3"]], + "embeddings": [[[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]]], + } + docs_and_vectors = _results_to_docs_and_vectors(mock_results) + + assert len(docs_and_vectors) == 2 + assert docs_and_vectors[0][0].page_content == "valid content" + assert docs_and_vectors[1][0].page_content == "another valid content" + assert docs_and_vectors[0][0].id == "id1" + assert docs_and_vectors[1][0].id == "id3" + assert docs_and_vectors[0][1] == [0.1, 0.2] + assert docs_and_vectors[1][1] == [0.5, 0.6] diff --git a/libs/partners/chroma/uv.lock b/libs/partners/chroma/uv.lock index 1d128ef1480..50e283ae68e 100644 --- a/libs/partners/chroma/uv.lock +++ b/libs/partners/chroma/uv.lock @@ -764,7 +764,7 @@ wheels = [ [[package]] name = "langchain-chroma" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." } dependencies = [ { name = "chromadb" }, @@ -839,7 +839,7 @@ typing = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -897,7 +897,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, diff --git a/libs/partners/deepseek/README.md b/libs/partners/deepseek/README.md index ff4b133999b..523f142df7f 100644 --- a/libs/partners/deepseek/README.md +++ b/libs/partners/deepseek/README.md @@ -1,5 +1,22 @@ # langchain-deepseek -This package contains the LangChain integration with the DeepSeek API +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-deepseek?label=%20)](https://pypi.org/project/langchain-deepseek/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-deepseek)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-deepseek)](https://pypistats.org/packages/langchain-deepseek) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-deepseek +``` + +## πŸ€” What is this? + +This package contains the LangChain integration with DeepSeek. + +## πŸ“– Documentation View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/deepseek) for more details. diff --git a/libs/partners/deepseek/langchain_deepseek/chat_models.py b/libs/partners/deepseek/langchain_deepseek/chat_models.py index 7abfd3ebd63..df34c21bc08 100644 --- a/libs/partners/deepseek/langchain_deepseek/chat_models.py +++ b/libs/partners/deepseek/langchain_deepseek/chat_models.py @@ -38,20 +38,20 @@ class ChatDeepSeek(BaseChatOpenAI): ``` Key init args β€” completion params: - model: str - Name of DeepSeek model to use, e.g. "deepseek-chat". - temperature: float + model: + Name of DeepSeek model to use, e.g. `"deepseek-chat"`. + temperature: Sampling temperature. - max_tokens: int | None + max_tokens: Max number of tokens to generate. Key init args β€” client params: - timeout: float | None + timeout: Timeout for requests. - max_retries: int + max_retries: Max number of retries. 
- api_key: str | None - DeepSeek API key. If not passed in will be read from env var DEEPSEEK_API_KEY. + api_key: + DeepSeek API key. If not passed in will be read from env var `DEEPSEEK_API_KEY`. See full list of supported init args and their descriptions in the params section. @@ -59,7 +59,7 @@ class ChatDeepSeek(BaseChatOpenAI): ```python from langchain_deepseek import ChatDeepSeek - llm = ChatDeepSeek( + model = ChatDeepSeek( model="...", temperature=0, max_tokens=None, @@ -76,16 +76,16 @@ class ChatDeepSeek(BaseChatOpenAI): ("system", "You are a helpful translator. Translate the user sentence to French."), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` Stream: ```python - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.text, end="") ``` ```python - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -94,13 +94,13 @@ class ChatDeepSeek(BaseChatOpenAI): Async: ```python - await llm.ainvoke(messages) + await model.ainvoke(messages) # stream: - # async for chunk in (await llm.astream(messages)) + # async for chunk in (await model.astream(messages)) # batch: - # await llm.abatch([messages]) + # await model.abatch([messages]) ``` Tool calling: @@ -120,8 +120,8 @@ class ChatDeepSeek(BaseChatOpenAI): location: str = Field(..., description="The city and state, e.g. San Francisco, CA") - llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) - ai_msg = llm_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?") + model_with_tools = model.bind_tools([GetWeather, GetPopulation]) + ai_msg = model_with_tools.invoke("Which city is hotter today and which is bigger: LA or NY?") ai_msg.tool_calls ``` @@ -142,23 +142,24 @@ class ChatDeepSeek(BaseChatOpenAI): rating: int | None = Field(description="How funny the joke is, from 1 to 10") - structured_llm = llm.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats") + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats") ``` See `ChatDeepSeek.with_structured_output()` for more. 
Token usage: ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.usage_metadata ``` ```python {"input_tokens": 28, "output_tokens": 5, "total_tokens": 33} ``` - Response metadata + + Response metadata: ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.response_metadata ``` """ # noqa: E501 @@ -263,6 +264,11 @@ class ChatDeepSeek(BaseChatOpenAI): if not isinstance(response, openai.BaseModel): return rtn + for generation in rtn.generations: + if generation.message.response_metadata is None: + generation.message.response_metadata = {} + generation.message.response_metadata["model_provider"] = "deepseek" + choices = getattr(response, "choices", None) if choices and hasattr(choices[0].message, "reasoning_content"): rtn.generations[0].message.additional_kwargs["reasoning_content"] = choices[ @@ -294,6 +300,10 @@ class ChatDeepSeek(BaseChatOpenAI): if (choices := chunk.get("choices")) and generation_chunk: top = choices[0] if isinstance(generation_chunk.message, AIMessageChunk): + generation_chunk.message.response_metadata = { + **generation_chunk.message.response_metadata, + "model_provider": "deepseek", + } if ( reasoning_content := top.get("delta", {}).get("reasoning_content") ) is not None: @@ -376,17 +386,19 @@ class ChatDeepSeek(BaseChatOpenAI): Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class (support added in 0.1.20), - - or a Pydantic class. + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. method: The method for steering model generation, one of: @@ -395,16 +407,19 @@ class ChatDeepSeek(BaseChatOpenAI): - `'json_mode'`: Uses DeepSeek's [JSON mode feature](https://api-docs.deepseek.com/guides/json_mode). - !!! warning "Behavior changed in 0.1.3" - Added support for `'json_mode'`. - include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. 
strict: Whether to enable strict schema adherence when generating the function @@ -416,18 +431,19 @@ class ChatDeepSeek(BaseChatOpenAI): kwargs: Additional keyword args aren't supported. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is False then Runnable outputs a dict. + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - If `include_raw` is True, then Runnable outputs a dict with keys: - - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None - - """ # noqa: E501 + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` + """ # Some applications require that incompatible parameters (e.g., unsupported # methods) be handled. if method == "json_schema": diff --git a/libs/partners/deepseek/pyproject.toml b/libs/partners/deepseek/pyproject.toml index 759ceb4e8a3..5bd91f882d1 100644 --- a/libs/partners/deepseek/pyproject.toml +++ b/libs/partners/deepseek/pyproject.toml @@ -3,26 +3,27 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-deepseek" +description = "An integration package connecting DeepSeek and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.0" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", - "langchain-openai>=1.0.0a3,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", + "langchain-openai>=1.0.0,<2.0.0", ] -name = "langchain-deepseek" -version = "1.0.0a1" -description = "An integration package connecting DeepSeek and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/deepseek" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/deepseek" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-deepseek%22" -docs = "https://reference.langchain.com/python/integrations/langchain_deepseek/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/deepseek" +Documentation = "https://reference.langchain.com/python/integrations/langchain_deepseek/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/deepseek" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-deepseek%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ diff --git a/libs/partners/deepseek/tests/integration_tests/test_chat_models.py b/libs/partners/deepseek/tests/integration_tests/test_chat_models.py index 817162901fc..55976552a65 100644 --- 
a/libs/partners/deepseek/tests/integration_tests/test_chat_models.py +++ b/libs/partners/deepseek/tests/integration_tests/test_chat_models.py @@ -10,6 +10,8 @@ from langchain_tests.integration_tests import ChatModelIntegrationTests from langchain_deepseek.chat_models import ChatDeepSeek +MODEL_NAME = "deepseek-chat" + class TestChatDeepSeek(ChatModelIntegrationTests): """Test `ChatDeepSeek` chat model.""" @@ -23,7 +25,7 @@ class TestChatDeepSeek(ChatModelIntegrationTests): def chat_model_params(self) -> dict: """Parameters to create chat model instance for testing.""" return { - "model": "deepseek-chat", + "model": MODEL_NAME, "temperature": 0, } @@ -49,6 +51,14 @@ def test_reasoning_content() -> None: response = chat_model.invoke("What is 3^3?") assert response.content assert response.additional_kwargs["reasoning_content"] + + content_blocks = response.content_blocks + assert content_blocks is not None + assert len(content_blocks) > 0 + reasoning_blocks = [ + block for block in content_blocks if block.get("type") == "reasoning" + ] + assert len(reasoning_blocks) > 0 raise ValueError @@ -61,3 +71,11 @@ def test_reasoning_content_streaming() -> None: full = chunk if full is None else full + chunk assert isinstance(full, AIMessageChunk) assert full.additional_kwargs["reasoning_content"] + + content_blocks = full.content_blocks + assert content_blocks is not None + assert len(content_blocks) > 0 + reasoning_blocks = [ + block for block in content_blocks if block.get("type") == "reasoning" + ] + assert len(reasoning_blocks) > 0 diff --git a/libs/partners/deepseek/tests/unit_tests/test_chat_models.py b/libs/partners/deepseek/tests/unit_tests/test_chat_models.py index d4bde140007..31be8ab98c1 100644 --- a/libs/partners/deepseek/tests/unit_tests/test_chat_models.py +++ b/libs/partners/deepseek/tests/unit_tests/test_chat_models.py @@ -13,6 +13,8 @@ from pydantic import SecretStr from langchain_deepseek.chat_models import ChatDeepSeek +MODEL_NAME = "deepseek-chat" + class MockOpenAIResponse(BaseModel): """Mock OpenAI response model.""" @@ -35,7 +37,7 @@ class MockOpenAIResponse(BaseModel): context: dict[str, Any] | None = None, serialize_as_any: bool = False, ) -> dict[str, Any]: - """Convert to dictionary, ensuring reasoning_content is included.""" + """Convert to dictionary, ensuring `reasoning_content` is included.""" choices_list = [] for choice in self.choices: if isinstance(choice.message, ChatCompletionMessage): @@ -61,7 +63,7 @@ class MockOpenAIResponse(BaseModel): class TestChatDeepSeekUnit(ChatModelUnitTests): - """Unit tests for `ChatDeepSeek` chat model.""" + """Standard unit tests for `ChatDeepSeek` chat model.""" @property def chat_model_class(self) -> type[ChatDeepSeek]: @@ -77,7 +79,7 @@ class TestChatDeepSeekUnit(ChatModelUnitTests): "DEEPSEEK_API_BASE": "api_base", }, { - "model": "deepseek-chat", + "model": MODEL_NAME, }, { "api_key": "api_key", @@ -89,7 +91,7 @@ class TestChatDeepSeekUnit(ChatModelUnitTests): def chat_model_params(self) -> dict: """Parameters to create chat model instance for testing.""" return { - "model": "deepseek-chat", + "model": MODEL_NAME, "api_key": "api_key", } @@ -103,7 +105,7 @@ class TestChatDeepSeekCustomUnit: def test_create_chat_result_with_reasoning_content(self) -> None: """Test that reasoning_content is properly extracted from response.""" - chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key")) + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) mock_message = MagicMock() mock_message.content = 
"Main content" mock_message.reasoning_content = "This is the reasoning content" @@ -120,8 +122,8 @@ class TestChatDeepSeekCustomUnit: ) def test_create_chat_result_with_model_extra_reasoning(self) -> None: - """Test that reasoning is properly extracted from model_extra.""" - chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key")) + """Test that reasoning is properly extracted from `model_extra`.""" + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) mock_message = MagicMock(spec=ChatCompletionMessage) mock_message.content = "Main content" mock_message.role = "assistant" @@ -143,7 +145,7 @@ class TestChatDeepSeekCustomUnit: def test_convert_chunk_with_reasoning_content(self) -> None: """Test that reasoning_content is properly extracted from streaming chunk.""" - chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key")) + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) chunk: dict[str, Any] = { "choices": [ { @@ -170,7 +172,7 @@ class TestChatDeepSeekCustomUnit: def test_convert_chunk_with_reasoning(self) -> None: """Test that reasoning is properly extracted from streaming chunk.""" - chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key")) + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) chunk: dict[str, Any] = { "choices": [ { @@ -197,7 +199,7 @@ class TestChatDeepSeekCustomUnit: def test_convert_chunk_without_reasoning(self) -> None: """Test that chunk without reasoning fields works correctly.""" - chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key")) + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) chunk: dict[str, Any] = {"choices": [{"delta": {"content": "Main content"}}]} chunk_result = chat_model._convert_chunk_to_generation_chunk( @@ -212,7 +214,7 @@ class TestChatDeepSeekCustomUnit: def test_convert_chunk_with_empty_delta(self) -> None: """Test that chunk with empty delta works correctly.""" - chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key")) + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) chunk: dict[str, Any] = {"choices": [{"delta": {}}]} chunk_result = chat_model._convert_chunk_to_generation_chunk( @@ -227,7 +229,7 @@ class TestChatDeepSeekCustomUnit: def test_get_request_payload(self) -> None: """Test that tool message content is converted from list to string.""" - chat_model = ChatDeepSeek(model="deepseek-chat", api_key=SecretStr("api_key")) + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) tool_message = ToolMessage(content=[], tool_call_id="test_id") payload = chat_model._get_request_payload([tool_message]) @@ -240,3 +242,72 @@ class TestChatDeepSeekCustomUnit: tool_message = ToolMessage(content="test string", tool_call_id="test_id") payload = chat_model._get_request_payload([tool_message]) assert payload["messages"][0]["content"] == "test string" + + def test_create_chat_result_with_model_provider(self) -> None: + """Test that `model_provider` is added to `response_metadata`.""" + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) + mock_message = MagicMock() + mock_message.content = "Main content" + mock_message.role = "assistant" + mock_response = MockOpenAIResponse( + choices=[MagicMock(message=mock_message)], + error=None, + ) + + result = chat_model._create_chat_result(mock_response) + assert ( + result.generations[0].message.response_metadata.get("model_provider") + == 
"deepseek" + ) + + def test_convert_chunk_with_model_provider(self) -> None: + """Test that `model_provider` is added to `response_metadata` for chunks.""" + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) + chunk: dict[str, Any] = { + "choices": [ + { + "delta": { + "content": "Main content", + }, + }, + ], + } + + chunk_result = chat_model._convert_chunk_to_generation_chunk( + chunk, + AIMessageChunk, + None, + ) + if chunk_result is None: + msg = "Expected chunk_result not to be None" + raise AssertionError(msg) + assert ( + chunk_result.message.response_metadata.get("model_provider") == "deepseek" + ) + + def test_create_chat_result_with_model_provider_multiple_generations( + self, + ) -> None: + """Test that `model_provider` is added to all generations when `n > 1`.""" + chat_model = ChatDeepSeek(model=MODEL_NAME, api_key=SecretStr("api_key")) + mock_message_1 = MagicMock() + mock_message_1.content = "First response" + mock_message_1.role = "assistant" + mock_message_2 = MagicMock() + mock_message_2.content = "Second response" + mock_message_2.role = "assistant" + + mock_response = MockOpenAIResponse( + choices=[ + MagicMock(message=mock_message_1), + MagicMock(message=mock_message_2), + ], + error=None, + ) + + result = chat_model._create_chat_result(mock_response) + assert len(result.generations) == 2 # noqa: PLR2004 + for generation in result.generations: + assert ( + generation.message.response_metadata.get("model_provider") == "deepseek" + ) diff --git a/libs/partners/deepseek/uv.lock b/libs/partners/deepseek/uv.lock index 8f97de18e14..66b3b57cb5d 100644 --- a/libs/partners/deepseek/uv.lock +++ b/libs/partners/deepseek/uv.lock @@ -370,7 +370,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -428,7 +428,7 @@ typing = [ [[package]] name = "langchain-deepseek" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." 
} dependencies = [ { name = "langchain-core" }, @@ -475,7 +475,7 @@ typing = [{ name = "mypy", specifier = ">=1.10.0,<2.0.0" }] [[package]] name = "langchain-openai" -version = "1.0.0a4" +version = "1.0.0" source = { editable = "../openai" } dependencies = [ { name = "langchain-core" }, @@ -495,8 +495,10 @@ dev = [{ name = "langchain-core", editable = "../../core" }] lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }] test = [ { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, + { name = "langchain", editable = "../../langchain_v1" }, { name = "langchain-core", editable = "../../core" }, { name = "langchain-tests", editable = "../../standard-tests" }, + { name = "langgraph-prebuilt", specifier = ">=0.7.0rc1" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, { name = "pytest", specifier = ">=7.3.0,<8.0.0" }, @@ -514,7 +516,7 @@ test-integration = [ { name = "httpx", specifier = ">=0.27.0,<1.0.0" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, - { name = "pillow", specifier = ">=10.3.0,<11.0.0" }, + { name = "pillow", specifier = ">=10.3.0,<12.0.0" }, ] typing = [ { name = "langchain-core", editable = "../../core" }, @@ -524,7 +526,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1147,7 +1149,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1155,96 +1157,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { 
url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = 
"sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - 
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = 
"sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = 
"2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = 
"2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = "https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = 
"sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = "https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size 
= 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = "sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = "2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] @@ -1639,38 +1668,63 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.11.0" +version = "0.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = "sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, - { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, - { url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, - { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, - { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, - { url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, - { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, - { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, - { url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, - { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, - { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, - { url = 
"https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, - { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, - { url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, + { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, + { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, ] [[package]] diff --git a/libs/partners/exa/README.md b/libs/partners/exa/README.md index 0764007fbfe..4e242f1d700 100644 --- a/libs/partners/exa/README.md +++ b/libs/partners/exa/README.md @@ -1,5 +1,22 @@ # langchain-exa -This package contains the LangChain integrations for Exa Cloud generative models. 
+[![PyPI - Version](https://img.shields.io/pypi/v/langchain-exa?label=%20)](https://pypi.org/project/langchain-exa/#history)
+[![PyPI - License](https://img.shields.io/pypi/l/langchain-exa)](https://opensource.org/licenses/MIT)
+[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-exa)](https://pypistats.org/packages/langchain-exa)
+[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai)
+
+Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs).
+
+## Quick Install
+
+```bash
+pip install langchain-exa
+```
+
+## πŸ€” What is this?
+
+This package contains the LangChain integration with Exa.
+
+## πŸ“– Documentation
 
 View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/exa_search) for more details.
diff --git a/libs/partners/exa/langchain_exa/retrievers.py b/libs/partners/exa/langchain_exa/retrievers.py
index 10d84396d66..fb58812dc99 100644
--- a/libs/partners/exa/langchain_exa/retrievers.py
+++ b/libs/partners/exa/langchain_exa/retrievers.py
@@ -69,8 +69,8 @@ class ExaSearchRetriever(BaseRetriever):
     """Whether to include a summary of the content. Can be a boolean or a dict
     with a custom query."""
 
-    client: Exa = Field(default=None)
-    exa_api_key: SecretStr = Field(default=None)
+    client: Exa = Field(default=None)  # type: ignore[assignment]
+    exa_api_key: SecretStr = Field(default=SecretStr(""))
     exa_base_url: str | None = None
 
     @model_validator(mode="before")
diff --git a/libs/partners/exa/langchain_exa/tools.py b/libs/partners/exa/langchain_exa/tools.py
index a6957f1f1eb..6612d270bd3 100644
--- a/libs/partners/exa/langchain_exa/tools.py
+++ b/libs/partners/exa/langchain_exa/tools.py
@@ -89,8 +89,8 @@ class ExaSearchResults(BaseTool):  # type: ignore[override]
         "Input should be an Exa-optimized query. "
         "Output is a JSON array of the query results"
     )
-    client: Exa = Field(default=None)
-    exa_api_key: SecretStr = Field(default=None)
+    client: Exa = Field(default=None)  # type: ignore[assignment]
+    exa_api_key: SecretStr = Field(default=SecretStr(""))
 
     @model_validator(mode="before")
     @classmethod
@@ -170,8 +170,8 @@ class ExaFindSimilarResults(BaseTool):  # type: ignore[override]
         "Input should be an Exa-optimized query. "
         "Output is a JSON array of the query results"
     )
-    client: Exa = Field(default=None)
-    exa_api_key: SecretStr = Field(default=None)
+    client: Exa = Field(default=None)  # type: ignore[assignment]
+    exa_api_key: SecretStr = Field(default=SecretStr(""))
     exa_base_url: str | None = None
 
     @model_validator(mode="before")
diff --git a/libs/partners/exa/pyproject.toml b/libs/partners/exa/pyproject.toml
index 1e44377b5f9..113688bc398 100644
--- a/libs/partners/exa/pyproject.toml
+++ b/libs/partners/exa/pyproject.toml
@@ -7,22 +7,22 @@ authors = []
 license = { text = "MIT" }
 requires-python = ">=3.10.0,<4.0.0"
 dependencies = [
-    "langchain-core>=1.0.0a7,<2.0.0",
+    "langchain-core>=1.0.0,<2.0.0",
     "exa-py>=1.0.8,<2.0.0"
 ]
 name = "langchain-exa"
-version = "1.0.0a1"
+version = "1.0.0"
 description = "An integration package connecting Exa and LangChain"
 readme = "README.md"
 
 [project.urls]
-homepage = "https://docs.langchain.com/oss/python/integrations/providers/exa_search"
-repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/exa"
-changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-exa%22"
-docs = "https://reference.langchain.com/python/integrations/langchain_exa/"
-twitter = "https://x.com/LangChainAI"
-slack = "https://www.langchain.com/join-community"
-reddit = "https://www.reddit.com/r/LangChain/"
+Homepage = "https://docs.langchain.com/oss/python/integrations/providers/exa_search"
+Documentation = "https://reference.langchain.com/python/integrations/langchain_exa/"
+Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/exa"
+Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-exa%22"
+Twitter = "https://x.com/LangChainAI"
+Slack = "https://www.langchain.com/join-community"
+Reddit = "https://www.reddit.com/r/LangChain/"
 
 [dependency-groups]
 test = [
@@ -42,7 +42,6 @@ test_integration = []
 typing = [
     "mypy>=1.10.0,<2.0.0",
     "langchain-core",
-    "pydantic>2.0.0,<2.10.0",  # TODO: support 2.10
 ]
 
diff --git a/libs/partners/exa/uv.lock b/libs/partners/exa/uv.lock
index 72d7fa208c2..58a619bd278 100644
--- a/libs/partners/exa/uv.lock
+++ b/libs/partners/exa/uv.lock
@@ -226,7 +226,7 @@ name = "exceptiongroup"
 version = "1.3.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+    { name = "typing-extensions", marker = "python_full_version < '3.11'" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" }
 wheels = [
@@ -396,7 +396,7 @@ wheels = [
 
 [[package]]
 name = "langchain-core"
-version = "1.0.0a6"
+version = "1.0.0"
 source = { editable = "../../core" }
 dependencies = [
     { name = "jsonpatch" },
@@ -454,7 +454,7 @@ typing = [
 
 [[package]]
 name = "langchain-exa"
-version = "1.0.0a1"
+version = "1.0.0"
 source = { editable = "."
} dependencies = [ { name = "exa-py" }, @@ -482,7 +482,6 @@ test = [ typing = [ { name = "langchain-core" }, { name = "mypy" }, - { name = "pydantic" }, ] [package.metadata] @@ -509,12 +508,11 @@ test-integration = [] typing = [ { name = "langchain-core", editable = "../../core" }, { name = "mypy", specifier = ">=1.10.0,<2.0.0" }, - { name = "pydantic", specifier = ">2.0.0,<2.10.0" }, ] [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1137,83 +1135,131 @@ wheels = [ [[package]] name = "pydantic" -version = "2.9.2" +version = "2.12.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, { name = "typing-extensions" }, + { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a9/b7/d9e3f12af310e1120c21603644a1cd86f59060e040ec5c3a80b8f05fae30/pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f", size = 769917, upload-time = "2024-09-17T15:59:54.273Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/35/d319ed522433215526689bad428a94058b6dd12190ce7ddd78618ac14b28/pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd", size = 816358, upload-time = "2025-10-14T15:02:21.842Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/e4/ba44652d562cbf0bf320e0f3810206149c8a4e99cdbf66da82e97ab53a15/pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12", size = 434928, upload-time = "2024-09-17T15:59:51.827Z" }, + { url = "https://files.pythonhosted.org/packages/6c/98/468cb649f208a6f1279448e6e5247b37ae79cf5e4041186f1e2ef3d16345/pydantic-2.12.2-py3-none-any.whl", hash = "sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae", size = 460628, upload-time = "2025-10-14T15:02:19.623Z" }, ] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.41.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e2/aa/6b6a9b9f8537b872f552ddd46dd3da230367754b6f707b8e1e963f515ea3/pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863", size = 402156, upload-time = "2024-09-16T16:06:44.786Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/8b/d3ae387f66277bd8104096d6ec0a145f4baa2966ebb2cad746c0920c9526/pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b", size = 1867835, upload-time = "2024-09-16T16:03:57.223Z" }, - { url = "https://files.pythonhosted.org/packages/46/76/f68272e4c3a7df8777798282c5e47d508274917f29992d84e1898f8908c7/pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166", size = 1776689, upload-time = "2024-09-16T16:03:59.266Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/69/5f945b4416f42ea3f3bc9d2aaec66c76084a6ff4ff27555bf9415ab43189/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb", size = 1800748, upload-time = "2024-09-16T16:04:01.011Z" }, - { url = "https://files.pythonhosted.org/packages/50/ab/891a7b0054bcc297fb02d44d05c50e68154e31788f2d9d41d0b72c89fdf7/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916", size = 1806469, upload-time = "2024-09-16T16:04:02.323Z" }, - { url = "https://files.pythonhosted.org/packages/31/7c/6e3fa122075d78f277a8431c4c608f061881b76c2b7faca01d317ee39b5d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07", size = 2002246, upload-time = "2024-09-16T16:04:03.688Z" }, - { url = "https://files.pythonhosted.org/packages/ad/6f/22d5692b7ab63fc4acbc74de6ff61d185804a83160adba5e6cc6068e1128/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232", size = 2659404, upload-time = "2024-09-16T16:04:05.299Z" }, - { url = "https://files.pythonhosted.org/packages/11/ac/1e647dc1121c028b691028fa61a4e7477e6aeb5132628fde41dd34c1671f/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2", size = 2053940, upload-time = "2024-09-16T16:04:06.604Z" }, - { url = "https://files.pythonhosted.org/packages/91/75/984740c17f12c3ce18b5a2fcc4bdceb785cce7df1511a4ce89bca17c7e2d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f", size = 1921437, upload-time = "2024-09-16T16:04:08.071Z" }, - { url = "https://files.pythonhosted.org/packages/a0/74/13c5f606b64d93f0721e7768cd3e8b2102164866c207b8cd6f90bb15d24f/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3", size = 1966129, upload-time = "2024-09-16T16:04:10.363Z" }, - { url = "https://files.pythonhosted.org/packages/18/03/9c4aa5919457c7b57a016c1ab513b1a926ed9b2bb7915bf8e506bf65c34b/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071", size = 2110908, upload-time = "2024-09-16T16:04:12.412Z" }, - { url = "https://files.pythonhosted.org/packages/92/2c/053d33f029c5dc65e5cf44ff03ceeefb7cce908f8f3cca9265e7f9b540c8/pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119", size = 1735278, upload-time = "2024-09-16T16:04:13.732Z" }, - { url = "https://files.pythonhosted.org/packages/de/81/7dfe464eca78d76d31dd661b04b5f2036ec72ea8848dd87ab7375e185c23/pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f", size = 1917453, upload-time = "2024-09-16T16:04:15.996Z" }, - { url = "https://files.pythonhosted.org/packages/5d/30/890a583cd3f2be27ecf32b479d5d615710bb926d92da03e3f7838ff3e58b/pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8", size = 
1865160, upload-time = "2024-09-16T16:04:18.628Z" }, - { url = "https://files.pythonhosted.org/packages/1d/9a/b634442e1253bc6889c87afe8bb59447f106ee042140bd57680b3b113ec7/pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d", size = 1776777, upload-time = "2024-09-16T16:04:20.038Z" }, - { url = "https://files.pythonhosted.org/packages/75/9a/7816295124a6b08c24c96f9ce73085032d8bcbaf7e5a781cd41aa910c891/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e", size = 1799244, upload-time = "2024-09-16T16:04:21.799Z" }, - { url = "https://files.pythonhosted.org/packages/a9/8f/89c1405176903e567c5f99ec53387449e62f1121894aa9fc2c4fdc51a59b/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607", size = 1805307, upload-time = "2024-09-16T16:04:23.324Z" }, - { url = "https://files.pythonhosted.org/packages/d5/a5/1a194447d0da1ef492e3470680c66048fef56fc1f1a25cafbea4bc1d1c48/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd", size = 2000663, upload-time = "2024-09-16T16:04:25.203Z" }, - { url = "https://files.pythonhosted.org/packages/13/a5/1df8541651de4455e7d587cf556201b4f7997191e110bca3b589218745a5/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea", size = 2655941, upload-time = "2024-09-16T16:04:27.211Z" }, - { url = "https://files.pythonhosted.org/packages/44/31/a3899b5ce02c4316865e390107f145089876dff7e1dfc770a231d836aed8/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e", size = 2052105, upload-time = "2024-09-16T16:04:28.611Z" }, - { url = "https://files.pythonhosted.org/packages/1b/aa/98e190f8745d5ec831f6d5449344c48c0627ac5fed4e5340a44b74878f8e/pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b", size = 1919967, upload-time = "2024-09-16T16:04:30.045Z" }, - { url = "https://files.pythonhosted.org/packages/ae/35/b6e00b6abb2acfee3e8f85558c02a0822e9a8b2f2d812ea8b9079b118ba0/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0", size = 1964291, upload-time = "2024-09-16T16:04:32.376Z" }, - { url = "https://files.pythonhosted.org/packages/13/46/7bee6d32b69191cd649bbbd2361af79c472d72cb29bb2024f0b6e350ba06/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64", size = 2109666, upload-time = "2024-09-16T16:04:33.923Z" }, - { url = "https://files.pythonhosted.org/packages/39/ef/7b34f1b122a81b68ed0a7d0e564da9ccdc9a2924c8d6c6b5b11fa3a56970/pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f", size = 1732940, upload-time = "2024-09-16T16:04:35.467Z" }, - { url = "https://files.pythonhosted.org/packages/2f/76/37b7e76c645843ff46c1d73e046207311ef298d3f7b2f7d8f6ac60113071/pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = 
"sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3", size = 1916804, upload-time = "2024-09-16T16:04:37.06Z" }, - { url = "https://files.pythonhosted.org/packages/74/7b/8e315f80666194b354966ec84b7d567da77ad927ed6323db4006cf915f3f/pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231", size = 1856459, upload-time = "2024-09-16T16:04:38.438Z" }, - { url = "https://files.pythonhosted.org/packages/14/de/866bdce10ed808323d437612aca1ec9971b981e1c52e5e42ad9b8e17a6f6/pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee", size = 1770007, upload-time = "2024-09-16T16:04:40.229Z" }, - { url = "https://files.pythonhosted.org/packages/dc/69/8edd5c3cd48bb833a3f7ef9b81d7666ccddd3c9a635225214e044b6e8281/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87", size = 1790245, upload-time = "2024-09-16T16:04:41.794Z" }, - { url = "https://files.pythonhosted.org/packages/80/33/9c24334e3af796ce80d2274940aae38dd4e5676298b4398eff103a79e02d/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8", size = 1801260, upload-time = "2024-09-16T16:04:43.991Z" }, - { url = "https://files.pythonhosted.org/packages/a5/6f/e9567fd90104b79b101ca9d120219644d3314962caa7948dd8b965e9f83e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327", size = 1996872, upload-time = "2024-09-16T16:04:45.593Z" }, - { url = "https://files.pythonhosted.org/packages/2d/ad/b5f0fe9e6cfee915dd144edbd10b6e9c9c9c9d7a56b69256d124b8ac682e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2", size = 2661617, upload-time = "2024-09-16T16:04:47.3Z" }, - { url = "https://files.pythonhosted.org/packages/06/c8/7d4b708f8d05a5cbfda3243aad468052c6e99de7d0937c9146c24d9f12e9/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36", size = 2071831, upload-time = "2024-09-16T16:04:48.893Z" }, - { url = "https://files.pythonhosted.org/packages/89/4d/3079d00c47f22c9a9a8220db088b309ad6e600a73d7a69473e3a8e5e3ea3/pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126", size = 1917453, upload-time = "2024-09-16T16:04:51.099Z" }, - { url = "https://files.pythonhosted.org/packages/e9/88/9df5b7ce880a4703fcc2d76c8c2d8eb9f861f79d0c56f4b8f5f2607ccec8/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e", size = 1968793, upload-time = "2024-09-16T16:04:52.604Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b9/41f7efe80f6ce2ed3ee3c2dcfe10ab7adc1172f778cc9659509a79518c43/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24", size = 2116872, upload-time = "2024-09-16T16:04:54.41Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/08/b59b7a92e03dd25554b0436554bf23e7c29abae7cce4b1c459cd92746811/pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84", size = 1738535, upload-time = "2024-09-16T16:04:55.828Z" }, - { url = "https://files.pythonhosted.org/packages/88/8d/479293e4d39ab409747926eec4329de5b7129beaedc3786eca070605d07f/pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9", size = 1917992, upload-time = "2024-09-16T16:04:57.395Z" }, - { url = "https://files.pythonhosted.org/packages/ad/ef/16ee2df472bf0e419b6bc68c05bf0145c49247a1095e85cee1463c6a44a1/pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc", size = 1856143, upload-time = "2024-09-16T16:04:59.062Z" }, - { url = "https://files.pythonhosted.org/packages/da/fa/bc3dbb83605669a34a93308e297ab22be82dfb9dcf88c6cf4b4f264e0a42/pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd", size = 1770063, upload-time = "2024-09-16T16:05:00.522Z" }, - { url = "https://files.pythonhosted.org/packages/4e/48/e813f3bbd257a712303ebdf55c8dc46f9589ec74b384c9f652597df3288d/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05", size = 1790013, upload-time = "2024-09-16T16:05:02.619Z" }, - { url = "https://files.pythonhosted.org/packages/b4/e0/56eda3a37929a1d297fcab1966db8c339023bcca0b64c5a84896db3fcc5c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d", size = 1801077, upload-time = "2024-09-16T16:05:04.154Z" }, - { url = "https://files.pythonhosted.org/packages/04/be/5e49376769bfbf82486da6c5c1683b891809365c20d7c7e52792ce4c71f3/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510", size = 1996782, upload-time = "2024-09-16T16:05:06.931Z" }, - { url = "https://files.pythonhosted.org/packages/bc/24/e3ee6c04f1d58cc15f37bcc62f32c7478ff55142b7b3e6d42ea374ea427c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6", size = 2661375, upload-time = "2024-09-16T16:05:08.773Z" }, - { url = "https://files.pythonhosted.org/packages/c1/f8/11a9006de4e89d016b8de74ebb1db727dc100608bb1e6bbe9d56a3cbbcce/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b", size = 2071635, upload-time = "2024-09-16T16:05:10.456Z" }, - { url = "https://files.pythonhosted.org/packages/7c/45/bdce5779b59f468bdf262a5bc9eecbae87f271c51aef628d8c073b4b4b4c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327", size = 1916994, upload-time = "2024-09-16T16:05:12.051Z" }, - { url = "https://files.pythonhosted.org/packages/d8/fa/c648308fe711ee1f88192cad6026ab4f925396d1293e8356de7e55be89b5/pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6", size = 
1968877, upload-time = "2024-09-16T16:05:14.021Z" }, - { url = "https://files.pythonhosted.org/packages/16/16/b805c74b35607d24d37103007f899abc4880923b04929547ae68d478b7f4/pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f", size = 2116814, upload-time = "2024-09-16T16:05:15.684Z" }, - { url = "https://files.pythonhosted.org/packages/d1/58/5305e723d9fcdf1c5a655e6a4cc2a07128bf644ff4b1d98daf7a9dbf57da/pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769", size = 1738360, upload-time = "2024-09-16T16:05:17.258Z" }, - { url = "https://files.pythonhosted.org/packages/a5/ae/e14b0ff8b3f48e02394d8acd911376b7b66e164535687ef7dc24ea03072f/pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5", size = 1919411, upload-time = "2024-09-16T16:05:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/13/a9/5d582eb3204464284611f636b55c0a7410d748ff338756323cb1ce721b96/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5", size = 1857135, upload-time = "2024-09-16T16:06:10.45Z" }, - { url = "https://files.pythonhosted.org/packages/2c/57/faf36290933fe16717f97829eabfb1868182ac495f99cf0eda9f59687c9d/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec", size = 1740583, upload-time = "2024-09-16T16:06:12.298Z" }, - { url = "https://files.pythonhosted.org/packages/91/7c/d99e3513dc191c4fec363aef1bf4c8af9125d8fa53af7cb97e8babef4e40/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480", size = 1793637, upload-time = "2024-09-16T16:06:14.092Z" }, - { url = "https://files.pythonhosted.org/packages/29/18/812222b6d18c2d13eebbb0f7cdc170a408d9ced65794fdb86147c77e1982/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068", size = 1941963, upload-time = "2024-09-16T16:06:16.757Z" }, - { url = "https://files.pythonhosted.org/packages/0f/36/c1f3642ac3f05e6bb4aec3ffc399fa3f84895d259cf5f0ce3054b7735c29/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801", size = 1915332, upload-time = "2024-09-16T16:06:18.677Z" }, - { url = "https://files.pythonhosted.org/packages/f7/ca/9c0854829311fb446020ebb540ee22509731abad886d2859c855dd29b904/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728", size = 1957926, upload-time = "2024-09-16T16:06:20.591Z" }, - { url = "https://files.pythonhosted.org/packages/c0/1c/7836b67c42d0cd4441fcd9fafbf6a027ad4b79b6559f80cf11f89fd83648/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433", size = 2100342, upload-time = "2024-09-16T16:06:22.888Z" }, - { url = "https://files.pythonhosted.org/packages/a9/f9/b6bcaf874f410564a78908739c80861a171788ef4d4f76f5009656672dfe/pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753", size = 1920344, upload-time = "2024-09-16T16:06:24.849Z" }, + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 
2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 
2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time 
= "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, + { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = 
"sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { 
url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = "https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, ] [[package]] @@ -1580,6 +1626,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = 
"2025-10-01T02:14:40.154Z" }, +] + [[package]] name = "urllib3" version = "1.26.20" diff --git a/libs/partners/fireworks/README.md b/libs/partners/fireworks/README.md index 93df06d9ac4..1e8dc6271ba 100644 --- a/libs/partners/fireworks/README.md +++ b/libs/partners/fireworks/README.md @@ -1,5 +1,22 @@ -# LangChain-Fireworks +# langchain-fireworks + +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-fireworks?label=%20)](https://pypi.org/project/langchain-fireworks/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-fireworks)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-fireworks)](https://pypistats.org/packages/langchain-fireworks) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-fireworks +``` + +## πŸ€” What is this? This is the partner package for tying Fireworks.ai and LangChain. Fireworks really strive to provide good support for LangChain use cases, so if you run into any issues please let us know. You can reach out to us [in our Discord channel](https://discord.com/channels/1137072072808472616/) +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/fireworks) for more details. diff --git a/libs/partners/fireworks/langchain_fireworks/_compat.py b/libs/partners/fireworks/langchain_fireworks/_compat.py new file mode 100644 index 00000000000..277762a78f5 --- /dev/null +++ b/libs/partners/fireworks/langchain_fireworks/_compat.py @@ -0,0 +1,26 @@ +"""Converts between AIMessage output formats, governed by `output_version`.""" + +from __future__ import annotations + +from langchain_core.messages import AIMessage + + +def _convert_from_v1_to_chat_completions(message: AIMessage) -> AIMessage: + """Convert a v1 message to the Chat Completions format.""" + if isinstance(message.content, list): + new_content: list = [] + for block in message.content: + if isinstance(block, dict): + block_type = block.get("type") + if block_type == "text": + # Strip annotations + new_content.append({"type": "text", "text": block["text"]}) + elif block_type in ("reasoning", "tool_call"): + pass + else: + new_content.append(block) + else: + new_content.append(block) + return message.model_copy(update={"content": new_content}) + + return message diff --git a/libs/partners/fireworks/langchain_fireworks/chat_models.py b/libs/partners/fireworks/langchain_fireworks/chat_models.py index 52be161d902..c8e3b18dfb2 100644 --- a/libs/partners/fireworks/langchain_fireworks/chat_models.py +++ b/libs/partners/fireworks/langchain_fireworks/chat_models.py @@ -78,6 +78,8 @@ from pydantic import ( ) from typing_extensions import Self +from langchain_fireworks._compat import _convert_from_v1_to_chat_completions + logger = logging.getLogger(__name__) @@ -152,6 +154,9 @@ def _convert_message_to_dict(message: BaseMessage) -> dict: elif isinstance(message, HumanMessage): message_dict = {"role": "user", "content": message.content} elif isinstance(message, AIMessage): + # Translate v1 content + if message.response_metadata.get("output_version") == "v1": + message = _convert_from_v1_to_chat_completions(message) message_dict = {"role": "assistant", "content": message.content} if "function_call" in message.additional_kwargs: 
message_dict["function_call"] = message.additional_kwargs["function_call"] @@ -238,6 +243,7 @@ def _convert_chunk_to_message_chunk( additional_kwargs=additional_kwargs, tool_call_chunks=tool_call_chunks, usage_metadata=usage_metadata, # type: ignore[arg-type] + response_metadata={"model_provider": "fireworks"}, ) if role == "system" or default_class == SystemMessageChunk: return SystemMessageChunk(content=content) @@ -280,7 +286,11 @@ class ChatFireworks(BaseChatModel): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the LangChain object.""" + """Get the namespace of the LangChain object. + + Returns: + `["langchain", "chat_models", "fireworks"]` + """ return ["langchain", "chat_models", "fireworks"] @property @@ -296,16 +306,22 @@ class ChatFireworks(BaseChatModel): """Return whether this model can be serialized by LangChain.""" return True - client: Any = Field(default=None, exclude=True) #: :meta private: - async_client: Any = Field(default=None, exclude=True) #: :meta private: + client: Any = Field(default=None, exclude=True) + + async_client: Any = Field(default=None, exclude=True) + model_name: str = Field(alias="model") """Model name to use.""" + temperature: float | None = None """What sampling temperature to use.""" + stop: str | list[str] | None = Field(default=None, alias="stop_sequences") """Default stop sequences.""" + model_kwargs: dict[str, Any] = Field(default_factory=dict) """Holds any model parameters valid for `create` call not explicitly specified.""" + fireworks_api_key: SecretStr = Field( alias="api_key", default_factory=secret_from_env( @@ -326,18 +342,25 @@ class ChatFireworks(BaseChatModel): alias="base_url", default_factory=from_env("FIREWORKS_API_BASE", default=None) ) """Base URL path for API requests, leave blank if not using a proxy or service - emulator.""" + emulator. + """ + request_timeout: float | tuple[float, float] | Any | None = Field( default=None, alias="timeout" ) """Timeout for requests to Fireworks completion API. Can be `float`, - `httpx.Timeout` or `None`.""" + `httpx.Timeout` or `None`. + """ + streaming: bool = False """Whether to stream the results or not.""" + n: int = 1 """Number of chat completions to generate for each prompt.""" + max_tokens: int | None = None """Maximum number of tokens to generate.""" + max_retries: int | None = None """Maximum number of retries to make when generating.""" @@ -515,6 +538,8 @@ class ChatFireworks(BaseChatModel): "output_tokens": token_usage.get("completion_tokens", 0), "total_tokens": token_usage.get("total_tokens", 0), } + message.response_metadata["model_provider"] = "fireworks" + message.response_metadata["model_name"] = self.model_name generation_info = {"finish_reason": res.get("finish_reason")} if "logprobs" in res: generation_info["logprobs"] = res["logprobs"] @@ -525,7 +550,6 @@ class ChatFireworks(BaseChatModel): generations.append(gen) llm_output = { "token_usage": token_usage, - "model_name": self.model_name, "system_fingerprint": response.get("system_fingerprint", ""), } return ChatResult(generations=generations, llm_output=llm_output) @@ -674,20 +698,19 @@ class ChatFireworks(BaseChatModel): Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class (support added in 0.1.7), - - or a Pydantic class. + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. 
If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. - !!! warning "Behavior changed in 0.1.7" - Added support for TypedDict class. + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. method: The method for steering model generation, one of: @@ -698,224 +721,230 @@ class ChatFireworks(BaseChatModel): - `'json_mode'`: Uses Fireworks's [JSON mode feature](https://docs.fireworks.ai/structured-responses/structured-response-formatting). - !!! warning "Behavior changed in 0.2.8" + !!! warning "Behavior changed in `langchain-fireworks` 0.2.8" Added support for `'json_schema'`. include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. kwargs: - Any additional parameters to pass to the - `langchain.runnable.Runnable` constructor. + Any additional parameters to pass to the `langchain.runnable.Runnable` + constructor. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes the same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - Otherwise, if `include_raw` is False then Runnable outputs a dict. - - If `include_raw` is True, then Runnable outputs a dict with keys: - - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above.
+ - `'parsing_error'`: `BaseException | None` Example: schema=Pydantic class, method="function_calling", include_raw=False: - ```python - from typing import Optional + ```python + from typing import Optional - from langchain_fireworks import ChatFireworks - from pydantic import BaseModel, Field + from langchain_fireworks import ChatFireworks + from pydantic import BaseModel, Field - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - # If we provide default values and/or descriptions for fields, these will be passed - # to the model. This is an important part of improving a model's ability to - # correctly return structured outputs. - justification: str | None = Field( - default=None, description="A justification for the answer." - ) - - - llm = ChatFireworks( - model="accounts/fireworks/models/firefunction-v1", - temperature=0, - ) - structured_llm = llm.with_structured_output(AnswerWithJustification) - - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" + answer: str + # If we provide default values and/or descriptions for fields, these will be passed + # to the model. This is an important part of improving a model's ability to + # correctly return structured outputs. + justification: str | None = Field( + default=None, description="A justification for the answer." ) - # -> AnswerWithJustification( - # answer='They weigh the same', - # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' - # ) - ``` + + model = ChatFireworks( + model="accounts/fireworks/models/firefunction-v1", + temperature=0, + ) + structured_model = model.with_structured_output(AnswerWithJustification) + + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + + # -> AnswerWithJustification( + # answer='They weigh the same', + # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' 
+ # ) + ``` Example: schema=Pydantic class, method="function_calling", include_raw=True: - ```python - from langchain_fireworks import ChatFireworks - from pydantic import BaseModel + ```python + from langchain_fireworks import ChatFireworks + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - llm = ChatFireworks( - model="accounts/fireworks/models/firefunction-v1", - temperature=0, - ) - structured_llm = llm.with_structured_output( - AnswerWithJustification, include_raw=True - ) + model = ChatFireworks( + model="accounts/fireworks/models/firefunction-v1", + temperature=0, + ) + structured_model = model.with_structured_output( + AnswerWithJustification, include_raw=True + ) - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), - # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + # -> { + # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), + # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), + # 'parsing_error': None + # } + ``` Example: schema=TypedDict class, method="function_calling", include_raw=False: - ```python - from typing_extensions import Annotated, TypedDict + ```python + from typing_extensions import Annotated, TypedDict - from langchain_fireworks import ChatFireworks + from langchain_fireworks import ChatFireworks - class AnswerWithJustification(TypedDict): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(TypedDict): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: Annotated[ - str | None, None, "A justification for the answer." - ] + answer: str + justification: Annotated[ + str | None, None, "A justification for the answer." 
+ ] - llm = ChatFireworks( - model="accounts/fireworks/models/firefunction-v1", - temperature=0, - ) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatFireworks( + model="accounts/fireworks/models/firefunction-v1", + temperature=0, + ) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'answer': 'They weigh the same', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' - # } - ``` + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + # -> { + # 'answer': 'They weigh the same', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' + # } + ``` Example: schema=OpenAI function schema, method="function_calling", include_raw=False: - ```python - from langchain_fireworks import ChatFireworks + ```python + from langchain_fireworks import ChatFireworks - oai_schema = { - "name": "AnswerWithJustification", - "description": "An answer to the user question along with justification for the answer.", - "parameters": { - "type": "object", - "properties": { - "answer": {"type": "string"}, - "justification": { - "description": "A justification for the answer.", - "type": "string", - }, + oai_schema = { + "name": "AnswerWithJustification", + "description": "An answer to the user question along with justification for the answer.", + "parameters": { + "type": "object", + "properties": { + "answer": {"type": "string"}, + "justification": { + "description": "A justification for the answer.", + "type": "string", }, - "required": ["answer"], }, - } + "required": ["answer"], + }, + } - llm = ChatFireworks( - model="accounts/fireworks/models/firefunction-v1", - temperature=0, - ) - structured_llm = llm.with_structured_output(oai_schema) + model = ChatFireworks( + model="accounts/fireworks/models/firefunction-v1", + temperature=0, + ) + structured_model = model.with_structured_output(oai_schema) - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'answer': 'They weigh the same', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' - # } - ``` + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + # -> { + # 'answer': 'They weigh the same', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' 
+ # } + ``` Example: schema=Pydantic class, method="json_mode", include_raw=True: - ```python - from langchain_fireworks import ChatFireworks - from pydantic import BaseModel + ```python + from langchain_fireworks import ChatFireworks + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - answer: str - justification: str + class AnswerWithJustification(BaseModel): + answer: str + justification: str - llm = ChatFireworks( - model="accounts/fireworks/models/firefunction-v1", temperature=0 - ) - structured_llm = llm.with_structured_output( - AnswerWithJustification, method="json_mode", include_raw=True - ) + model = ChatFireworks( + model="accounts/fireworks/models/firefunction-v1", temperature=0 + ) + structured_model = model.with_structured_output( + AnswerWithJustification, method="json_mode", include_raw=True + ) - structured_llm.invoke( - "Answer the following question. " - "Make sure to return a JSON blob with keys 'answer' and 'justification'. " - "What's heavier a pound of bricks or a pound of feathers?" - ) - # -> { - # 'raw': AIMessage(content='{"answer": "They are both the same weight.", "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight."}'), - # 'parsed': AnswerWithJustification(answer='They are both the same weight.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke( + "Answer the following question. " + "Make sure to return a JSON blob with keys 'answer' and 'justification'. " + "What's heavier a pound of bricks or a pound of feathers?" + ) + # -> { + # 'raw': AIMessage(content='{"answer": "They are both the same weight.", "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight."}'), + # 'parsed': AnswerWithJustification(answer='They are both the same weight.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.'), + # 'parsing_error': None + # } + ``` Example: schema=None, method="json_mode", include_raw=True: - ```python - structured_llm = llm.with_structured_output( - method="json_mode", include_raw=True - ) + ```python + structured_model = model.with_structured_output( + method="json_mode", include_raw=True + ) - structured_llm.invoke( - "Answer the following question. " - "Make sure to return a JSON blob with keys 'answer' and 'justification'. " - "What's heavier a pound of bricks or a pound of feathers?" - ) - # -> { - # 'raw': AIMessage(content='{"answer": "They are both the same weight.", "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight."}'), - # 'parsed': { - # 'answer': 'They are both the same weight.', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.' - # }, - # 'parsing_error': None - # } - ``` + structured_model.invoke( + "Answer the following question. " + "Make sure to return a JSON blob with keys 'answer' and 'justification'. " + "What's heavier a pound of bricks or a pound of feathers?" 
+ ) + # -> { + # 'raw': AIMessage(content='{"answer": "They are both the same weight.", "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight."}'), + # 'parsed': { + # 'answer': 'They are both the same weight.', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.' + # }, + # 'parsing_error': None + # } + ``` """ # noqa: E501 _ = kwargs.pop("strict", None) diff --git a/libs/partners/fireworks/langchain_fireworks/embeddings.py b/libs/partners/fireworks/langchain_fireworks/embeddings.py index a445d5a7432..cd83e70d8f1 100644 --- a/libs/partners/fireworks/langchain_fireworks/embeddings.py +++ b/libs/partners/fireworks/langchain_fireworks/embeddings.py @@ -19,11 +19,11 @@ class FireworksEmbeddings(BaseModel, Embeddings): ``` Key init args β€” completion params: - model: str + model: Name of Fireworks model to use. Key init args β€” client params: - fireworks_api_key: SecretStr + fireworks_api_key: Fireworks API key. See full list of supported init args and their descriptions in the params section. @@ -65,7 +65,8 @@ class FireworksEmbeddings(BaseModel, Embeddings): ``` """ - client: OpenAI = Field(default=None, exclude=True) # type: ignore[assignment] # :meta private: + client: OpenAI = Field(default=None, exclude=True) # type: ignore[assignment] + fireworks_api_key: SecretStr = Field( alias="api_key", default_factory=secret_from_env( @@ -77,6 +78,7 @@ class FireworksEmbeddings(BaseModel, Embeddings): Automatically read from env variable `FIREWORKS_API_KEY` if not provided. """ + model: str = "nomic-ai/nomic-embed-text-v1.5" model_config = ConfigDict( diff --git a/libs/partners/fireworks/pyproject.toml b/libs/partners/fireworks/pyproject.toml index c2e1e620925..6b7df1d5cf2 100644 --- a/libs/partners/fireworks/pyproject.toml +++ b/libs/partners/fireworks/pyproject.toml @@ -3,29 +3,30 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-fireworks" +description = "An integration package connecting Fireworks and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.0" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "fireworks-ai>=0.13.0,<1.0.0", "openai>=2.0.0,<3.0.0", "requests>=2.0.0,<3.0.0", "aiohttp>=3.9.1,<4.0.0", ] -name = "langchain-fireworks" -version = "1.0.0a1" -description = "An integration package connecting Fireworks and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/fireworks" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/fireworks" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-fireworks%22" -docs = "https://reference.langchain.com/python/integrations/langchain_fireworks/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/fireworks" +Documentation = "https://reference.langchain.com/python/integrations/langchain_fireworks/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/fireworks" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-fireworks%22" 
+Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ diff --git a/libs/partners/fireworks/tests/integration_tests/test_chat_models.py b/libs/partners/fireworks/tests/integration_tests/test_chat_models.py index c1662d3a356..2eb48409ee9 100644 --- a/libs/partners/fireworks/tests/integration_tests/test_chat_models.py +++ b/libs/partners/fireworks/tests/integration_tests/test_chat_models.py @@ -57,7 +57,9 @@ async def test_astream() -> None: full = token if full is None else full + token if token.usage_metadata is not None: chunks_with_token_counts += 1 - if token.response_metadata: + if token.response_metadata and not set(token.response_metadata.keys()).issubset( + {"model_provider", "output_version"} + ): chunks_with_response_metadata += 1 if chunks_with_token_counts != 1 or chunks_with_response_metadata != 1: msg = ( @@ -76,6 +78,7 @@ async def test_astream() -> None: ) assert isinstance(full.response_metadata["model_name"], str) assert full.response_metadata["model_name"] + assert full.response_metadata["model_provider"] == "fireworks" async def test_abatch_tags() -> None: @@ -103,6 +106,7 @@ def test_invoke() -> None: result = llm.invoke("I'm Pickle Rick", config={"tags": ["foo"]}) assert isinstance(result.content, str) + assert result.response_metadata["model_provider"] == "fireworks" def _get_joke_class( diff --git a/libs/partners/fireworks/uv.lock b/libs/partners/fireworks/uv.lock index 4e74d9275b9..0183783b3f3 100644 --- a/libs/partners/fireworks/uv.lock +++ b/libs/partners/fireworks/uv.lock @@ -338,7 +338,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ @@ -642,7 +642,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a6" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -700,7 +700,7 @@ typing = [ [[package]] name = "langchain-fireworks" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." 
} dependencies = [ { name = "aiohttp" }, @@ -766,7 +766,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1491,7 +1491,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1499,96 +1499,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = 
"2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = 
"sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { 
url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 
2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { 
url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = 
"sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = 
"2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/partners/groq/README.md b/libs/partners/groq/README.md index 0e3866901a5..b3d9ab7ef0f 100644 --- a/libs/partners/groq/README.md +++ b/libs/partners/groq/README.md @@ -1,3 +1,18 @@ # langchain-groq +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-groq?label=%20)](https://pypi.org/project/langchain-groq/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-groq)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-groq)](https://pypistats.org/packages/langchain-groq) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-groq +``` + +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/groq) for more details. 
diff --git a/libs/partners/groq/langchain_groq/_compat.py b/libs/partners/groq/langchain_groq/_compat.py new file mode 100644 index 00000000000..adc4f79eac0 --- /dev/null +++ b/libs/partners/groq/langchain_groq/_compat.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +import json +from typing import Any, cast + +from langchain_core.messages import content as types + + +def _convert_from_v1_to_groq( + content: list[types.ContentBlock], + model_provider: str | None, +) -> tuple[list[dict[str, Any] | str], dict]: + new_content: list = [] + new_additional_kwargs: dict = {} + for i, block in enumerate(content): + if block["type"] == "text": + new_content.append({"text": block.get("text", ""), "type": "text"}) + + elif ( + block["type"] == "reasoning" + and (reasoning := block.get("reasoning")) + and model_provider == "groq" + ): + new_additional_kwargs["reasoning_content"] = reasoning + + elif block["type"] == "server_tool_call" and model_provider == "groq": + new_block = {} + if "args" in block: + new_block["arguments"] = json.dumps(block["args"]) + if idx := block.get("extras", {}).get("index"): + new_block["index"] = idx + if block.get("name") == "web_search": + new_block["type"] = "search" + elif block.get("name") == "code_interpreter": + new_block["type"] = "python" + else: + new_block["type"] = "" + + if i < len(content) - 1 and content[i + 1]["type"] == "server_tool_result": + result = cast("types.ServerToolResult", content[i + 1]) + for k, v in result.get("extras", {}).items(): + new_block[k] = v # noqa: PERF403 + if "output" in result: + new_block["output"] = result["output"] + + if "executed_tools" not in new_additional_kwargs: + new_additional_kwargs["executed_tools"] = [] + new_additional_kwargs["executed_tools"].append(new_block) + elif block["type"] == "server_tool_result": + continue + + elif ( + block["type"] == "non_standard" + and "value" in block + and model_provider == "groq" + ): + new_content.append(block["value"]) + else: + new_content.append(block) + + # For consistency with v0 payloads, we cast single text blocks to str + if ( + len(new_content) == 1 + and isinstance(new_content[0], dict) + and new_content[0].get("type") == "text" + and (text_content := new_content[0].get("text")) + and isinstance(text_content, str) + ): + return text_content, new_additional_kwargs + + return new_content, new_additional_kwargs diff --git a/libs/partners/groq/langchain_groq/chat_models.py b/libs/partners/groq/langchain_groq/chat_models.py index 7f4bb628638..4bd308545ec 100644 --- a/libs/partners/groq/langchain_groq/chat_models.py +++ b/libs/partners/groq/langchain_groq/chat_models.py @@ -57,6 +57,7 @@ from langchain_core.utils.pydantic import is_basemodel_subclass from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator from typing_extensions import Self +from langchain_groq._compat import _convert_from_v1_to_groq from langchain_groq.version import __version__ @@ -79,13 +80,13 @@ class ChatGroq(BaseChatModel): ``` Key init args β€” completion params: - model: str + model: Name of Groq model to use, e.g. `llama-3.1-8b-instant`. - temperature: float + temperature: Sampling temperature. Ranges from `0.0` to `1.0`. - max_tokens: int | None + max_tokens: Max number of tokens to generate. - reasoning_format: Literal["parsed", "raw", "hidden] | None + reasoning_format: The format for reasoning output. Groq will default to `raw` if left undefined. @@ -95,26 +96,26 @@ class ChatGroq(BaseChatModel): - `'raw'`: Includes reasoning within think tags (e.g. 
`{reasoning_content}`). - `'hidden'`: Returns only the final answer content. Note: this only - supresses reasoning content in the response; the model will still perform + suppresses reasoning content in the response; the model will still perform reasoning unless overridden in `reasoning_effort`. See the [Groq documentation](https://console.groq.com/docs/reasoning#reasoning) for more details and a list of supported models. - model_kwargs: Dict[str, Any] + model_kwargs: Holds any model parameters valid for create call not explicitly specified. Key init args β€” client params: - timeout: Union[float, Tuple[float, float], Any, None] + timeout: Timeout for requests. - max_retries: int + max_retries: Max number of retries. - api_key: str | None + api_key: Groq API key. If not passed in will be read from env var `GROQ_API_KEY`. - base_url: str | None + base_url: Base URL path for API requests, leave blank if not using a proxy or service emulator. - custom_get_token_ids: Callable[[str], list[int]] | None + custom_get_token_ids: Optional encoder to use for counting tokens. See full list of supported init args and their descriptions in the params @@ -124,7 +125,7 @@ class ChatGroq(BaseChatModel): ```python from langchain_groq import ChatGroq - llm = ChatGroq( + model = ChatGroq( model="llama-3.1-8b-instant", temperature=0.0, max_retries=2, @@ -138,7 +139,7 @@ class ChatGroq(BaseChatModel): ("system", "You are a helpful translator. Translate the user sentence to French."), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` ```python AIMessage(content='The English sentence "I love programming" can @@ -155,7 +156,7 @@ class ChatGroq(BaseChatModel): Stream: ```python # Streaming `text` for each content chunk received - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.text, end="") ``` @@ -173,7 +174,7 @@ class ChatGroq(BaseChatModel): ```python # Reconstructing a full response - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -195,7 +196,7 @@ class ChatGroq(BaseChatModel): Async: ```python - await llm.ainvoke(messages) + await model.ainvoke(messages) ``` ```python @@ -228,7 +229,7 @@ class ChatGroq(BaseChatModel): location: str = Field(..., description="The city and state, e.g. 
San Francisco, CA") - model_with_tools = llm.bind_tools([GetWeather, GetPopulation]) + model_with_tools = model.bind_tools([GetWeather, GetPopulation]) ai_msg = model_with_tools.invoke("What is the population of NY?") ai_msg.tool_calls ``` @@ -260,7 +261,7 @@ class ChatGroq(BaseChatModel): rating: int | None = Field(description="How funny the joke is, from 1 to 10") - structured_model = llm.with_structured_output(Joke) + structured_model = model.with_structured_output(Joke) structured_model.invoke("Tell me a joke about cats") ``` @@ -276,7 +277,7 @@ class ChatGroq(BaseChatModel): Response metadata: ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.response_metadata ``` @@ -299,14 +300,19 @@ class ChatGroq(BaseChatModel): ``` """ # noqa: E501 - client: Any = Field(default=None, exclude=True) #: :meta private: - async_client: Any = Field(default=None, exclude=True) #: :meta private: + client: Any = Field(default=None, exclude=True) + + async_client: Any = Field(default=None, exclude=True) + model_name: str = Field(alias="model") """Model name to use.""" + temperature: float = 0.7 """What sampling temperature to use.""" + stop: list[str] | str | None = Field(default=None, alias="stop_sequences") """Default stop sequences.""" + reasoning_format: Literal["parsed", "raw", "hidden"] | None = Field(default=None) """The format for reasoning output. Groq will default to raw if left undefined. @@ -315,13 +321,14 @@ class ChatGroq(BaseChatModel): `additional_kwargs.reasoning_content` field of the response. - `'raw'`: Includes reasoning within think tags (e.g. `{reasoning_content}`). - - `'hidden'`: Returns only the final answer content. Note: this only supresses + - `'hidden'`: Returns only the final answer content. Note: this only suppresses reasoning content in the response; the model will still perform reasoning unless overridden in `reasoning_effort`. See the [Groq documentation](https://console.groq.com/docs/reasoning#reasoning) for more details and a list of supported models. """ + reasoning_effort: str | None = Field(default=None) """The level of effort the model will put into reasoning. Groq will default to enabling reasoning if left undefined. @@ -330,32 +337,44 @@ class ChatGroq(BaseChatModel): for more details and a list of options and models that support setting a reasoning effort. """ + model_kwargs: dict[str, Any] = Field(default_factory=dict) """Holds any model parameters valid for `create` call not explicitly specified.""" + groq_api_key: SecretStr | None = Field( alias="api_key", default_factory=secret_from_env("GROQ_API_KEY", default=None) ) """Automatically inferred from env var `GROQ_API_KEY` if not provided.""" + groq_api_base: str | None = Field( alias="base_url", default_factory=from_env("GROQ_API_BASE", default=None) ) """Base URL path for API requests. Leave blank if not using a proxy or service - emulator.""" + emulator. + """ + # to support explicit proxy for Groq groq_proxy: str | None = Field(default_factory=from_env("GROQ_PROXY", default=None)) + request_timeout: float | tuple[float, float] | Any | None = Field( default=None, alias="timeout" ) """Timeout for requests to Groq completion API. Can be float, `httpx.Timeout` or - None.""" + `None`. 
+ """ + max_retries: int = 2 """Maximum number of retries to make when generating.""" + streaming: bool = False """Whether to stream the results or not.""" + n: int = 1 """Number of chat completions to generate for each prompt.""" + max_tokens: int | None = None """Maximum number of tokens to generate.""" + service_tier: Literal["on_demand", "flex", "auto"] = Field(default="on_demand") """Optional parameter that you can include to specify the service tier you'd like to use for requests. @@ -370,12 +389,16 @@ class ChatGroq(BaseChatModel): See the [Groq documentation](https://console.groq.com/docs/flex-processing) for more details and a list of service tiers and descriptions. """ + default_headers: Mapping[str, str] | None = None + default_query: Mapping[str, object] | None = None + # Configure a custom httpx client. See the # [httpx documentation](https://www.python-httpx.org/api/#client) for more details. http_client: Any | None = None """Optional `httpx.Client`.""" + http_async_client: Any | None = None """Optional `httpx.AsyncClient`. Only used for async invocations. Must specify `http_client` as well if you'd like a custom client for sync invocations.""" @@ -825,22 +848,21 @@ class ChatGroq(BaseChatModel): Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class (supported added in 0.1.9), - - or a Pydantic class. + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. - !!! warning "Behavior changed in 0.1.9" - Added support for TypedDict class. + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. - !!! warning "Behavior changed in 0.3.8" + !!! warning "Behavior changed in `langchain-groq` 0.3.8" Added support for Groq's dedicated structured output feature via `method="json_schema"`. @@ -877,199 +899,210 @@ class ChatGroq(BaseChatModel): `'json_mode'` does not support streaming responses stop sequences. include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. kwargs: - Any additional parameters to pass to the - `langchain.runnable.Runnable` constructor. 
+ Any additional parameters to pass to the `langchain.runnable.Runnable` + constructor. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - Otherwise, if `include_raw` is False then Runnable outputs a dict. - - If `include_raw` is True, then Runnable outputs a dict with keys: - - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` Example: schema=Pydantic class, method="function_calling", include_raw=False: - ```python - from typing import Optional + ```python + from typing import Optional - from langchain_groq import ChatGroq - from pydantic import BaseModel, Field + from langchain_groq import ChatGroq + from pydantic import BaseModel, Field - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - # If we provide default values and/or descriptions for fields, these will be passed - # to the model. This is an important part of improving a model's ability to - # correctly return structured outputs. - justification: str | None = Field(default=None, description="A justification for the answer.") + answer: str + # If we provide default values and/or descriptions for fields, these will be passed + # to the model. This is an important part of improving a model's ability to + # correctly return structured outputs. + justification: str | None = Field(default=None, description="A justification for the answer.") - llm = ChatGroq(model="openai/gpt-oss-120b", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatGroq(model="openai/gpt-oss-120b", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> AnswerWithJustification( - # answer='They weigh the same', - # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' - # ) - ``` + # -> AnswerWithJustification( + # answer='They weigh the same', + # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' 
+ # ) + ``` Example: schema=Pydantic class, method="function_calling", include_raw=True: - ```python - from langchain_groq import ChatGroq - from pydantic import BaseModel + + ```python + from langchain_groq import ChatGroq + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - llm = ChatGroq(model="openai/gpt-oss-120b", temperature=0) - structured_llm = llm.with_structured_output( - AnswerWithJustification, - include_raw=True, - ) + model = ChatGroq(model="openai/gpt-oss-120b", temperature=0) + structured_model = model.with_structured_output( + AnswerWithJustification, + include_raw=True, + ) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> { - # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), - # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") + # -> { + # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), + # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. 
The weight is the same, but the volume or density of the objects may differ.'), + # 'parsing_error': None + # } + ``` Example: schema=TypedDict class, method="function_calling", include_raw=False: - ```python - from typing_extensions import Annotated, TypedDict - from langchain_groq import ChatGroq + ```python + from typing_extensions import Annotated, TypedDict + + from langchain_groq import ChatGroq - class AnswerWithJustification(TypedDict): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(TypedDict): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: Annotated[str | None, None, "A justification for the answer."] + answer: str + justification: Annotated[str | None, None, "A justification for the answer."] - llm = ChatGroq(model="openai/gpt-oss-120b", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatGroq(model="openai/gpt-oss-120b", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") + # -> { + # 'answer': 'They weigh the same', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' + # } + ``` + + Example: schema=OpenAI function schema, method="function_calling", include_raw=False: + + ```python + from langchain_groq import ChatGroq + + oai_schema = { + 'name': 'AnswerWithJustification', + 'description': 'An answer to the user question along with justification for the answer.', + 'parameters': { + 'type': 'object', + 'properties': { + 'answer': {'type': 'string'}, + 'justification': {'description': 'A justification for the answer.', 'type': 'string'} + }, + 'required': ['answer'] + } + + model = ChatGroq(model="openai/gpt-oss-120b", temperature=0) + structured_model = model.with_structured_output(oai_schema) + + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) # -> { # 'answer': 'They weigh the same', # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' # } - ``` - - Example: schema=OpenAI function schema, method="function_calling", include_raw=False: - ```python - from langchain_groq import ChatGroq - - oai_schema = { - 'name': 'AnswerWithJustification', - 'description': 'An answer to the user question along with justification for the answer.', - 'parameters': { - 'type': 'object', - 'properties': { - 'answer': {'type': 'string'}, - 'justification': {'description': 'A justification for the answer.', 'type': 'string'} - }, - 'required': ['answer'] - } - - llm = ChatGroq(model="openai/gpt-oss-120b", temperature=0) - structured_llm = llm.with_structured_output(oai_schema) - - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'answer': 'They weigh the same', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' 
- # } - ``` + ``` Example: schema=Pydantic class, method="json_schema", include_raw=False: - ```python - from typing import Optional - from langchain_groq import ChatGroq - from pydantic import BaseModel, Field + ```python + from typing import Optional + + from langchain_groq import ChatGroq + from pydantic import BaseModel, Field - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - # If we provide default values and/or descriptions for fields, these will be passed - # to the model. This is an important part of improving a model's ability to - # correctly return structured outputs. - justification: str | None = Field(default=None, description="A justification for the answer.") + answer: str + # If we provide default values and/or descriptions for fields, these will be passed + # to the model. This is an important part of improving a model's ability to + # correctly return structured outputs. + justification: str | None = Field(default=None, description="A justification for the answer.") - llm = ChatGroq(model="openai/gpt-oss-120b", temperature=0) - structured_llm = llm.with_structured_output( - AnswerWithJustification, - method="json_schema", - ) + model = ChatGroq(model="openai/gpt-oss-120b", temperature=0) + structured_model = model.with_structured_output( + AnswerWithJustification, + method="json_schema", + ) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") - # -> AnswerWithJustification( - # answer='They weigh the same', - # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' - # ) - ``` + # -> AnswerWithJustification( + # answer='They weigh the same', + # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' + # ) + ``` Example: schema=Pydantic class, method="json_mode", include_raw=True: - ```python - from langchain_groq import ChatGroq - from pydantic import BaseModel + + ```python + from langchain_groq import ChatGroq + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - answer: str - justification: str + class AnswerWithJustification(BaseModel): + answer: str + justification: str - llm = ChatGroq(model="openai/gpt-oss-120b", temperature=0) - structured_llm = llm.with_structured_output( - AnswerWithJustification, method="json_mode", include_raw=True - ) + model = ChatGroq(model="openai/gpt-oss-120b", temperature=0) + structured_model = model.with_structured_output( + AnswerWithJustification, method="json_mode", include_raw=True + ) - structured_llm.invoke( - "Answer the following question. " - "Make sure to return a JSON blob with keys 'answer' and 'justification'.\n\n" - "What's heavier a pound of bricks or a pound of feathers?" - ) - # -> { - # 'raw': AIMessage(content='{\n "answer": "They are both the same weight.",\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." 
\n}'), - # 'parsed': AnswerWithJustification(answer='They are both the same weight.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke( + "Answer the following question. " + "Make sure to return a JSON blob with keys 'answer' and 'justification'.\n\n" + "What's heavier a pound of bricks or a pound of feathers?" + ) + # -> { + # 'raw': AIMessage(content='{\n "answer": "They are both the same weight.",\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." \n}'), + # 'parsed': AnswerWithJustification(answer='They are both the same weight.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.'), + # 'parsing_error': None + # } + ``` """ # noqa: E501 _ = kwargs.pop("strict", None) @@ -1187,6 +1220,17 @@ def _convert_message_to_dict(message: BaseMessage) -> dict: elif isinstance(message, HumanMessage): message_dict = {"role": "user", "content": message.content} elif isinstance(message, AIMessage): + # Translate v1 content + if message.response_metadata.get("output_version") == "v1": + new_content, new_additional_kwargs = _convert_from_v1_to_groq( + message.content_blocks, message.response_metadata.get("model_provider") + ) + message = message.model_copy( + update={ + "content": new_content, + "additional_kwargs": new_additional_kwargs, + } + ) message_dict = {"role": "assistant", "content": message.content} # If content is a list of content blocks, filter out tool_call blocks @@ -1268,6 +1312,22 @@ def _convert_chunk_to_message_chunk( if role == "assistant" or default_class == AIMessageChunk: if reasoning := _dict.get("reasoning"): additional_kwargs["reasoning_content"] = reasoning + if executed_tools := _dict.get("executed_tools"): + additional_kwargs["executed_tools"] = [] + for executed_tool in executed_tools: + if executed_tool.get("output"): + # Tool output duplicates query and other server tool call data + additional_kwargs["executed_tools"].append( + { + k: executed_tool[k] + for k in ("index", "output") + if k in executed_tool + } + ) + else: + additional_kwargs["executed_tools"].append( + {k: executed_tool[k] for k in executed_tool if k != "output"} + ) if usage := (chunk.get("x_groq") or {}).get("usage"): input_tokens = usage.get("prompt_tokens", 0) output_tokens = usage.get("completion_tokens", 0) @@ -1282,6 +1342,7 @@ def _convert_chunk_to_message_chunk( content=content, additional_kwargs=additional_kwargs, usage_metadata=usage_metadata, # type: ignore[arg-type] + response_metadata={"model_provider": "groq"}, ) if role == "system" or default_class == SystemMessageChunk: return SystemMessageChunk(content=content) @@ -1313,6 +1374,8 @@ def _convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage: additional_kwargs: dict = {} if reasoning := _dict.get("reasoning"): additional_kwargs["reasoning_content"] = reasoning + if executed_tools := _dict.get("executed_tools"): + additional_kwargs["executed_tools"] = executed_tools if function_call := _dict.get("function_call"): additional_kwargs["function_call"] = dict(function_call) tool_calls = [] @@ -1332,6 +1395,7 @@ def _convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage: additional_kwargs=additional_kwargs, tool_calls=tool_calls, 
invalid_tool_calls=invalid_tool_calls, + response_metadata={"model_provider": "groq"}, ) if role == "system": return SystemMessage(content=_dict.get("content", "")) diff --git a/libs/partners/groq/pyproject.toml b/libs/partners/groq/pyproject.toml index b2b8df19cf6..c4a657e1833 100644 --- a/libs/partners/groq/pyproject.toml +++ b/libs/partners/groq/pyproject.toml @@ -3,26 +3,27 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-groq" +description = "An integration package connecting Groq and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.0" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "groq>=0.30.0,<1.0.0" ] -name = "langchain-groq" -version = "1.0.0a1" -description = "An integration package connecting Groq and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/groq" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/groq" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-groq%22" -docs = "https://reference.langchain.com/python/integrations/langchain_groq/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/groq" +Documentation = "https://reference.langchain.com/python/integrations/langchain_groq/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/groq" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-groq%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ diff --git a/libs/partners/groq/tests/cassettes/test_code_interpreter.yaml.gz b/libs/partners/groq/tests/cassettes/test_code_interpreter.yaml.gz new file mode 100644 index 00000000000..495aa28f73b Binary files /dev/null and b/libs/partners/groq/tests/cassettes/test_code_interpreter.yaml.gz differ diff --git a/libs/partners/groq/tests/cassettes/test_web_search.yaml.gz b/libs/partners/groq/tests/cassettes/test_web_search.yaml.gz new file mode 100644 index 00000000000..bcda7832765 Binary files /dev/null and b/libs/partners/groq/tests/cassettes/test_web_search.yaml.gz differ diff --git a/libs/partners/groq/tests/conftest.py b/libs/partners/groq/tests/conftest.py new file mode 100644 index 00000000000..ba705a7d5d1 --- /dev/null +++ b/libs/partners/groq/tests/conftest.py @@ -0,0 +1,37 @@ +from typing import Any + +import pytest +from langchain_tests.conftest import CustomPersister, CustomSerializer +from langchain_tests.conftest import ( + _base_vcr_config as _base_vcr_config, # noqa: PLC0414 +) +from vcr import VCR # type: ignore[import-untyped] + + +def remove_request_headers(request: Any) -> Any: + for k in request.headers: + request.headers[k] = "**REDACTED**" + return request + + +def remove_response_headers(response: dict) -> dict: + for k in response["headers"]: + response["headers"][k] = "**REDACTED**" + return response + + +@pytest.fixture(scope="session") +def vcr_config(_base_vcr_config: dict) -> dict: # noqa: F811 + """Extend the default configuration coming from langchain_tests.""" + config = _base_vcr_config.copy() + config["before_record_request"] = remove_request_headers + 
config["before_record_response"] = remove_response_headers + config["serializer"] = "yaml.gz" + config["path_transformer"] = VCR.ensure_suffix(".yaml.gz") + + return config + + +def pytest_recording_configure(config: dict, vcr: VCR) -> None: + vcr.register_persister(CustomPersister()) + vcr.register_serializer("yaml.gz", CustomSerializer()) diff --git a/libs/partners/groq/tests/integration_tests/test_chat_models.py b/libs/partners/groq/tests/integration_tests/test_chat_models.py index 2e25e7ae627..bd43972b1f3 100644 --- a/libs/partners/groq/tests/integration_tests/test_chat_models.py +++ b/libs/partners/groq/tests/integration_tests/test_chat_models.py @@ -111,7 +111,9 @@ async def test_astream() -> None: full = token if full is None else full + token if token.usage_metadata is not None: chunks_with_token_counts += 1 - if token.response_metadata: + if token.response_metadata and not set(token.response_metadata.keys()).issubset( + {"model_provider", "output_version"} + ): chunks_with_response_metadata += 1 if chunks_with_token_counts != 1 or chunks_with_response_metadata != 1: msg = ( @@ -384,7 +386,7 @@ def test_streaming_generation_info() -> None: ) list(chat.stream("Respond with the single word Hello", stop=["o"])) generation = callback.saved_things["generation"] - # `Hello!` is two tokens, assert that that is what is returned + # `Hello!` is two tokens, assert that is what is returned assert isinstance(generation, LLMResult) assert generation.generations[0][0].text == "Hell" @@ -665,6 +667,146 @@ async def test_setting_service_tier_request_async() -> None: assert response.response_metadata.get("service_tier") == "on_demand" +@pytest.mark.vcr +def test_web_search() -> None: + llm = ChatGroq(model="groq/compound") + input_message = { + "role": "user", + "content": "Search for the weather in Boston today.", + } + full: AIMessageChunk | None = None + for chunk in llm.stream([input_message]): + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + assert full.additional_kwargs["reasoning_content"] + assert full.additional_kwargs["executed_tools"] + assert [block["type"] for block in full.content_blocks] == [ + "reasoning", + "server_tool_call", + "server_tool_result", + "text", + ] + + next_message = { + "role": "user", + "content": "Now search for the weather in San Francisco.", + } + response = llm.invoke([input_message, full, next_message]) + assert [block["type"] for block in response.content_blocks] == [ + "reasoning", + "server_tool_call", + "server_tool_result", + "text", + ] + + +@pytest.mark.default_cassette("test_web_search.yaml.gz") +@pytest.mark.vcr +def test_web_search_v1() -> None: + llm = ChatGroq(model="groq/compound", output_version="v1") + input_message = { + "role": "user", + "content": "Search for the weather in Boston today.", + } + full: AIMessageChunk | None = None + for chunk in llm.stream([input_message]): + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + assert full.additional_kwargs["reasoning_content"] + assert full.additional_kwargs["executed_tools"] + assert [block["type"] for block in full.content_blocks] == [ + "reasoning", + "server_tool_call", + "server_tool_result", + "reasoning", + "text", + ] + + next_message = { + "role": "user", + "content": "Now search for the weather in San Francisco.", + } + response = llm.invoke([input_message, full, next_message]) + assert [block["type"] for block in response.content_blocks] == [ + "reasoning", + "server_tool_call", + 
"server_tool_result", + "text", + ] + + +@pytest.mark.vcr +def test_code_interpreter() -> None: + llm = ChatGroq(model="groq/compound-mini") + input_message = { + "role": "user", + "content": ( + "Calculate the square root of 101 and show me the Python code you used." + ), + } + full: AIMessageChunk | None = None + for chunk in llm.stream([input_message]): + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + assert full.additional_kwargs["reasoning_content"] + assert full.additional_kwargs["executed_tools"] + assert [block["type"] for block in full.content_blocks] == [ + "reasoning", + "server_tool_call", + "server_tool_result", + "text", + ] + + next_message = { + "role": "user", + "content": "Now do the same for 102.", + } + response = llm.invoke([input_message, full, next_message]) + assert [block["type"] for block in response.content_blocks] == [ + "reasoning", + "server_tool_call", + "server_tool_result", + "text", + ] + + +@pytest.mark.default_cassette("test_code_interpreter.yaml.gz") +@pytest.mark.vcr +def test_code_interpreter_v1() -> None: + llm = ChatGroq(model="groq/compound-mini", output_version="v1") + input_message = { + "role": "user", + "content": ( + "Calculate the square root of 101 and show me the Python code you used." + ), + } + full: AIMessageChunk | None = None + for chunk in llm.stream([input_message]): + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + assert full.additional_kwargs["reasoning_content"] + assert full.additional_kwargs["executed_tools"] + assert [block["type"] for block in full.content_blocks] == [ + "reasoning", + "server_tool_call", + "server_tool_result", + "reasoning", + "text", + ] + + next_message = { + "role": "user", + "content": "Now do the same for 102.", + } + response = llm.invoke([input_message, full, next_message]) + assert [block["type"] for block in response.content_blocks] == [ + "reasoning", + "server_tool_call", + "server_tool_result", + "text", + ] + + # Groq does not currently support N > 1 # @pytest.mark.scheduled # def test_chat_multiple_completions() -> None: diff --git a/libs/partners/groq/tests/unit_tests/test_chat_models.py b/libs/partners/groq/tests/unit_tests/test_chat_models.py index eec86d508a4..bad3474cef9 100644 --- a/libs/partners/groq/tests/unit_tests/test_chat_models.py +++ b/libs/partners/groq/tests/unit_tests/test_chat_models.py @@ -54,7 +54,9 @@ def test__convert_dict_to_message_human() -> None: def test__convert_dict_to_message_ai() -> None: message = {"role": "assistant", "content": "foo"} result = _convert_dict_to_message(message) - expected_output = AIMessage(content="foo") + expected_output = AIMessage( + content="foo", response_metadata={"model_provider": "groq"} + ) assert result == expected_output @@ -80,6 +82,7 @@ def test__convert_dict_to_message_tool_call() -> None: type="tool_call", ) ], + response_metadata={"model_provider": "groq"}, ) assert result == expected_output @@ -112,7 +115,7 @@ def test__convert_dict_to_message_tool_call() -> None: name="GenerateUsername", args="oops", id="call_wm0JY6CdwOMZ4eTxHWUThDNz", - error="Function GenerateUsername arguments:\n\noops\n\nare not valid JSON. Received JSONDecodeError Expecting value: line 1 column 1 (char 0)\nFor troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/OUTPUT_PARSING_FAILURE ", # noqa: E501 + error="Function GenerateUsername arguments:\n\noops\n\nare not valid JSON. 
Received JSONDecodeError Expecting value: line 1 column 1 (char 0)\nFor troubleshooting, visit: https://docs.langchain.com/oss/python/langchain/errors/OUTPUT_PARSING_FAILURE ", # noqa: E501 type="invalid_tool_call", ), ], @@ -124,6 +127,7 @@ def test__convert_dict_to_message_tool_call() -> None: type="tool_call", ), ], + response_metadata={"model_provider": "groq"}, ) assert result == expected_output diff --git a/libs/partners/groq/uv.lock b/libs/partners/groq/uv.lock index 7ba39224885..5cf339a67d3 100644 --- a/libs/partners/groq/uv.lock +++ b/libs/partners/groq/uv.lock @@ -314,7 +314,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -372,7 +372,7 @@ typing = [ [[package]] name = "langchain-groq" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." } dependencies = [ { name = "groq" }, @@ -429,7 +429,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1033,7 +1033,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1041,96 +1041,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size 
= 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = 
"sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = "https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { 
url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = "https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = 
"sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = 
"2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = "sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = "2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash 
= "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/partners/huggingface/README.md b/libs/partners/huggingface/README.md index 7e9e7cf7a3e..7a8ba6295fe 100644 --- a/libs/partners/huggingface/README.md +++ b/libs/partners/huggingface/README.md @@ -1,5 +1,22 @@ # langchain-huggingface -This package contains the LangChain integrations for huggingface related classes. 
+[![PyPI - Version](https://img.shields.io/pypi/v/langchain-huggingface?label=%20)](https://pypi.org/project/langchain-huggingface/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-huggingface)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-huggingface)](https://pypistats.org/packages/langchain-huggingface) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) -View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/groq) for more details. +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-huggingface +``` + +## πŸ€” What is this? + +This package contains the LangChain integrations for Hugging Face related classes. + +## πŸ“– Documentation + +View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/huggingface) for more details. diff --git a/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py b/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py index 2f65ee79c53..8fc6037d2ea 100644 --- a/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py +++ b/libs/partners/huggingface/langchain_huggingface/chat_models/huggingface.py @@ -330,18 +330,17 @@ class ChatHuggingFace(BaseChatModel): ``` Key init args β€” completion params: - llm: `HuggingFaceTextGenInference`, `HuggingFaceEndpoint`, `HuggingFaceHub`, or - 'HuggingFacePipeline' LLM to be used. + llm: + LLM to be used. Key init args β€” client params: - custom_get_token_ids: Callable[[str], list[int]] | None + custom_get_token_ids: Optional encoder to use for counting tokens. - metadata: dict[str, Any] | None + metadata: Metadata to add to the run trace. - tags: list[str] | None + tags: Tags to add to the run trace. - tokenizer: Any - verbose: bool + verbose: Whether to print out response text. See full list of supported init args and their descriptions in the params @@ -352,7 +351,7 @@ class ChatHuggingFace(BaseChatModel): from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace - llm = HuggingFaceEndpoint( + model = HuggingFaceEndpoint( repo_id="microsoft/Phi-3-mini-4k-instruct", task="text-generation", max_new_tokens=512, @@ -360,7 +359,7 @@ class ChatHuggingFace(BaseChatModel): repetition_penalty=1.03, ) - chat = ChatHuggingFace(llm=llm, verbose=True) + chat = ChatHuggingFace(llm=model, verbose=True) ``` Invoke: @@ -500,6 +499,9 @@ class ChatHuggingFace(BaseChatModel): """Modify the likelihood of specified tokens appearing in the completion.""" streaming: bool = False """Whether to stream the results or not.""" + stream_usage: bool | None = None + """Whether to include usage metadata in streaming output. 
If True, an additional + message chunk will be generated during the stream including usage metadata.""" n: int | None = None """Number of chat completions to generate for each prompt.""" top_p: float | None = None @@ -511,8 +513,57 @@ class ChatHuggingFace(BaseChatModel): def __init__(self, **kwargs: Any): super().__init__(**kwargs) + + # Inherit properties from the LLM if they weren't explicitly set + self._inherit_llm_properties() + self._resolve_model_id() + def _inherit_llm_properties(self) -> None: + """Inherit properties from the wrapped LLM instance if not explicitly set.""" + if not hasattr(self, "llm") or self.llm is None: + return + + # Map of ChatHuggingFace properties to LLM properties + property_mappings = { + "temperature": "temperature", + "max_tokens": "max_new_tokens", # Different naming convention + "top_p": "top_p", + "seed": "seed", + "streaming": "streaming", + "stop": "stop_sequences", + } + + # Inherit properties from LLM and not explicitly set here + for chat_prop, llm_prop in property_mappings.items(): + if hasattr(self.llm, llm_prop): + llm_value = getattr(self.llm, llm_prop) + chat_value = getattr(self, chat_prop, None) + if not chat_value and llm_value: + setattr(self, chat_prop, llm_value) + + # Handle special cases for HuggingFaceEndpoint + if _is_huggingface_endpoint(self.llm): + # Inherit additional HuggingFaceEndpoint specific properties + endpoint_mappings = { + "frequency_penalty": "repetition_penalty", + } + + for chat_prop, llm_prop in endpoint_mappings.items(): + if hasattr(self.llm, llm_prop): + llm_value = getattr(self.llm, llm_prop) + chat_value = getattr(self, chat_prop, None) + if chat_value is None and llm_value is not None: + setattr(self, chat_prop, llm_value) + + # Inherit model_kwargs if not explicitly set + if ( + not self.model_kwargs + and hasattr(self.llm, "model_kwargs") + and isinstance(self.llm.model_kwargs, dict) + ): + self.model_kwargs = self.llm.model_kwargs.copy() + @model_validator(mode="after") def validate_llm(self) -> Self: if ( @@ -635,14 +686,40 @@ class ChatHuggingFace(BaseChatModel): ) return self._to_chat_result(llm_result) + def _should_stream_usage( + self, *, stream_usage: bool | None = None, **kwargs: Any + ) -> bool | None: + """Determine whether to include usage metadata in streaming output. + + For backwards compatibility, we check for `stream_options` passed + explicitly to kwargs or in the model_kwargs and override self.stream_usage. 
+ """ + stream_usage_sources = [ # order of precedence + stream_usage, + kwargs.get("stream_options", {}).get("include_usage"), + self.model_kwargs.get("stream_options", {}).get("include_usage"), + self.stream_usage, + ] + for source in stream_usage_sources: + if isinstance(source, bool): + return source + return self.stream_usage + def _stream( self, messages: list[BaseMessage], stop: list[str] | None = None, run_manager: CallbackManagerForLLMRun | None = None, + *, + stream_usage: bool | None = None, **kwargs: Any, ) -> Iterator[ChatGenerationChunk]: if _is_huggingface_endpoint(self.llm): + stream_usage = self._should_stream_usage( + stream_usage=stream_usage, **kwargs + ) + if stream_usage: + kwargs["stream_options"] = {"include_usage": stream_usage} message_dicts, params = self._create_message_dicts(messages, stop) params = {**params, **kwargs, "stream": True} @@ -651,7 +728,20 @@ class ChatHuggingFace(BaseChatModel): messages=message_dicts, **params ): if len(chunk["choices"]) == 0: + if usage := chunk.get("usage"): + usage_msg = AIMessageChunk( + content="", + additional_kwargs={}, + response_metadata={}, + usage_metadata={ + "input_tokens": usage.get("prompt_tokens", 0), + "output_tokens": usage.get("completion_tokens", 0), + "total_tokens": usage.get("total_tokens", 0), + }, + ) + yield ChatGenerationChunk(message=usage_msg) continue + choice = chunk["choices"][0] message_chunk = _convert_chunk_to_message_chunk( chunk, default_chunk_class @@ -689,8 +779,13 @@ class ChatHuggingFace(BaseChatModel): messages: list[BaseMessage], stop: list[str] | None = None, run_manager: AsyncCallbackManagerForLLMRun | None = None, + *, + stream_usage: bool | None = None, **kwargs: Any, ) -> AsyncIterator[ChatGenerationChunk]: + stream_usage = self._should_stream_usage(stream_usage=stream_usage, **kwargs) + if stream_usage: + kwargs["stream_options"] = {"include_usage": stream_usage} message_dicts, params = self._create_message_dicts(messages, stop) params = {**params, **kwargs, "stream": True} @@ -700,7 +795,20 @@ class ChatHuggingFace(BaseChatModel): messages=message_dicts, **params ): if len(chunk["choices"]) == 0: + if usage := chunk.get("usage"): + usage_msg = AIMessageChunk( + content="", + additional_kwargs={}, + response_metadata={}, + usage_metadata={ + "input_tokens": usage.get("prompt_tokens", 0), + "output_tokens": usage.get("completion_tokens", 0), + "total_tokens": usage.get("total_tokens", 0), + }, + ) + yield ChatGenerationChunk(message=usage_msg) continue + choice = chunk["choices"][0] message_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class) generation_info = {} @@ -881,9 +989,9 @@ class ChatHuggingFace(BaseChatModel): Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a typedDict class (support added in 0.1.7), + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class Pydantic class is currently supported. @@ -894,12 +1002,18 @@ class ChatHuggingFace(BaseChatModel): - `'json_mode'`: uses JSON mode. include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. 
+ + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. kwargs: Additional parameters to pass to the underlying LLM's @@ -907,20 +1021,19 @@ class ChatHuggingFace(BaseChatModel): method, such as `response_format` or `ls_structured_output_format`. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - Otherwise, if `include_raw` is False then Runnable outputs a dict. - - If `include_raw` is True, then Runnable outputs a dict with keys: - - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None - - """ # noqa: E501 + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` + """ _ = kwargs.pop("strict", None) if kwargs: msg = f"Received unsupported arguments {kwargs}" diff --git a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface.py b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface.py index 6d8dbd73e53..8c55348cee1 100644 --- a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface.py +++ b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface.py @@ -12,8 +12,6 @@ from langchain_huggingface.utils.import_utils import ( is_optimum_intel_version, ) -DEFAULT_MODEL_NAME = "sentence-transformers/all-mpnet-base-v2" - _MIN_OPTIMUM_VERSION = "1.22" @@ -37,7 +35,9 @@ class HuggingFaceEmbeddings(BaseModel, Embeddings): ``` """ - model_name: str = Field(default=DEFAULT_MODEL_NAME, alias="model") + model_name: str = Field( + default="sentence-transformers/all-mpnet-base-v2", alias="model" + ) """Model name to use.""" cache_folder: str | None = None """Path to store models. diff --git a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py index 69d9fdf8844..e61c2292122 100644 --- a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py +++ b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py @@ -32,18 +32,24 @@ class HuggingFaceEndpointEmbeddings(BaseModel, Embeddings): ``` """ - client: Any = None #: :meta private: - async_client: Any = None #: :meta private: + client: Any = None + + async_client: Any = None + model: str | None = None """Model name to use.""" + provider: str | None = None """Name of the provider to use for inference with the model specified in `repo_id`. e.g. "sambanova". if not specified, defaults to HF Inference API. 
available providers can be found in the [huggingface_hub documentation](https://huggingface.co/docs/huggingface_hub/guides/inference#supported-providers-and-tasks).""" + repo_id: str | None = None """Huggingfacehub repository id, for backward compatibility.""" + task: str | None = "feature-extraction" """Task to call the model with.""" + model_kwargs: dict | None = None """Keyword arguments to pass to the model.""" diff --git a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_endpoint.py b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_endpoint.py index 4ee76521c36..f082aa0ae69 100644 --- a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_endpoint.py +++ b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_endpoint.py @@ -36,7 +36,7 @@ class HuggingFaceEndpoint(LLM): Example: ```python # Basic Example (no streaming) - llm = HuggingFaceEndpoint( + model = HuggingFaceEndpoint( endpoint_url="http://localhost:8010/", max_new_tokens=512, top_k=10, @@ -46,13 +46,13 @@ class HuggingFaceEndpoint(LLM): repetition_penalty=1.03, huggingfacehub_api_token="my-api-key", ) - print(llm.invoke("What is Deep Learning?")) + print(model.invoke("What is Deep Learning?")) # Streaming response example from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler callbacks = [StreamingStdOutCallbackHandler()] - llm = HuggingFaceEndpoint( + model = HuggingFaceEndpoint( endpoint_url="http://localhost:8010/", max_new_tokens=512, top_k=10, @@ -64,75 +64,99 @@ class HuggingFaceEndpoint(LLM): streaming=True, huggingfacehub_api_token="my-api-key", ) - print(llm.invoke("What is Deep Learning?")) + print(model.invoke("What is Deep Learning?")) # Basic Example (no streaming) with Mistral-Nemo-Base-2407 model using a third-party provider (Novita). - llm = HuggingFaceEndpoint( + model = HuggingFaceEndpoint( repo_id="mistralai/Mistral-Nemo-Base-2407", provider="novita", max_new_tokens=100, do_sample=False, huggingfacehub_api_token="my-api-key", ) - print(llm.invoke("What is Deep Learning?")) + print(model.invoke("What is Deep Learning?")) ``` """ # noqa: E501 endpoint_url: str | None = None """Endpoint URL to use. If repo_id is not specified then this needs to given or should be pass as env variable in `HF_INFERENCE_ENDPOINT`""" + repo_id: str | None = None """Repo to use. If endpoint_url is not specified then this needs to given""" + provider: str | None = None """Name of the provider to use for inference with the model specified in `repo_id`. e.g. "cerebras". if not specified, Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers. available providers can be found in the [huggingface_hub documentation](https://huggingface.co/docs/huggingface_hub/guides/inference#supported-providers-and-tasks).""" + huggingfacehub_api_token: str | None = Field( default_factory=from_env("HUGGINGFACEHUB_API_TOKEN", default=None) ) + max_new_tokens: int = 512 """Maximum number of generated tokens""" + top_k: int | None = None """The number of highest probability vocabulary tokens to keep for top-k-filtering.""" + top_p: float | None = 0.95 """If set to < 1, only the smallest set of most probable tokens with probabilities that add up to `top_p` or higher are kept for generation.""" + typical_p: float | None = 0.95 """Typical Decoding mass. 
See [Typical Decoding for Natural Language Generation](https://arxiv.org/abs/2202.00666) for more information."""
+
     temperature: float | None = 0.8
     """The value used to module the logits distribution."""
+
     repetition_penalty: float | None = None
     """The parameter for repetition penalty. 1.0 means no penalty.
     See [this paper](https://arxiv.org/pdf/1909.05858.pdf) for more details."""
+
     return_full_text: bool = False
     """Whether to prepend the prompt to the generated text"""
+
     truncate: int | None = None
     """Truncate inputs tokens to the given size"""
+
     stop_sequences: list[str] = Field(default_factory=list)
     """Stop generating tokens if a member of `stop_sequences` is generated"""
+
     seed: int | None = None
     """Random sampling seed"""
+
     inference_server_url: str = ""
     """text-generation-inference instance base url"""
+
     timeout: int = 120
     """Timeout in seconds"""
+
     streaming: bool = False
     """Whether to generate a stream of tokens asynchronously"""
+
     do_sample: bool = False
     """Activate logits sampling"""
+
     watermark: bool = False
     """Watermarking with [A Watermark for Large Language Models]
     (https://arxiv.org/abs/2301.10226)"""
+
     server_kwargs: dict[str, Any] = Field(default_factory=dict)
     """Holds any text-generation-inference server parameters not explicitly
     specified"""
+
     model_kwargs: dict[str, Any] = Field(default_factory=dict)
     """Holds any model parameters valid for `call` not explicitly specified"""
+
     model: str
-    client: Any = None  #: :meta private:
-    async_client: Any = None  #: :meta private:
+
+    client: Any = None
+
+    async_client: Any = None
+
     task: str | None = None
     """Task to call the model with.
     Should be a task that returns `generated_text`."""
diff --git a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py
index f6edf545536..ba646f1309f 100644
--- a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py
+++ b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py
@@ -71,15 +71,19 @@ class HuggingFacePipeline(BaseLLM):
     ```
     """

-    pipeline: Any = None  #: :meta private:
+    pipeline: Any = None
+
     model_id: str | None = None
     """The model name. If not set explicitly by the user, it will be inferred
     from the provided pipeline (if available). If neither is provided, the
     DEFAULT_MODEL_ID will be used."""
+
     model_kwargs: dict | None = None
     """Keyword arguments passed to the model."""
+
     pipeline_kwargs: dict | None = None
     """Keyword arguments passed to the pipeline."""
+
     batch_size: int = DEFAULT_BATCH_SIZE
     """Batch size to use when passing multiple documents to generate."""
diff --git a/libs/partners/huggingface/pyproject.toml b/libs/partners/huggingface/pyproject.toml
index 00f279553d0..28c00e661e2 100644
--- a/libs/partners/huggingface/pyproject.toml
+++ b/libs/partners/huggingface/pyproject.toml
@@ -3,27 +3,28 @@ requires = ["hatchling"]
 build-backend = "hatchling.build"

 [project]
-authors = []
+name = "langchain-huggingface"
+description = "An integration package connecting Hugging Face and LangChain."
 license = { text = "MIT" }
+readme = "README.md"
+authors = []
+
+version = "1.0.1"
 requires-python = ">=3.10.0,<4.0.0"
 dependencies = [
-    "langchain-core>=0.3.76,<2.0.0",
+    "langchain-core>=1.0.3,<2.0.0",
     "tokenizers>=0.19.1,<1.0.0",
     "huggingface-hub>=0.33.4,<1.0.0",
 ]
-name = "langchain-huggingface"
-version = "0.3.1"
-description = "An integration package connecting Hugging Face and LangChain."
-readme = "README.md"

 [project.urls]
-homepage = "https://docs.langchain.com/oss/python/integrations/providers/huggingface"
-repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/huggingface"
-changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-huggingface%22"
-docs = "https://reference.langchain.com/python/integrations/langchain_huggingface/"
-twitter = "https://x.com/LangChainAI"
-slack = "https://www.langchain.com/join-community"
-reddit = "https://www.reddit.com/r/LangChain/"
+Homepage = "https://docs.langchain.com/oss/python/integrations/providers/huggingface"
+Documentation = "https://reference.langchain.com/python/integrations/langchain_huggingface/"
+Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/huggingface"
+Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-huggingface%22"
+Twitter = "https://x.com/LangChainAI"
+Slack = "https://www.langchain.com/join-community"
+Reddit = "https://www.reddit.com/r/LangChain/"

 [project.optional-dependencies]
 full = [
diff --git a/libs/partners/huggingface/tests/integration_tests/test_chat_models.py b/libs/partners/huggingface/tests/integration_tests/test_chat_models.py
new file mode 100644
index 00000000000..4a64f7f0b5a
--- /dev/null
+++ b/libs/partners/huggingface/tests/integration_tests/test_chat_models.py
@@ -0,0 +1,22 @@
+from langchain_core.messages import AIMessageChunk
+
+from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
+
+
+def test_stream_usage() -> None:
+    """Test we are able to configure stream options on models that require it."""
+    llm = HuggingFaceEndpoint(  # type: ignore[call-arg] # (model is inferred in class)
+        repo_id="google/gemma-3-27b-it",
+        task="conversational",
+        provider="nebius",
+    )
+
+    model = ChatHuggingFace(llm=llm, stream_usage=True)
+
+    full: AIMessageChunk | None = None
+    for chunk in model.stream("hello"):
+        assert isinstance(chunk, AIMessageChunk)
+        full = chunk if full is None else full + chunk
+
+    assert isinstance(full, AIMessageChunk)
+    assert full.usage_metadata
diff --git a/libs/partners/huggingface/tests/unit_tests/test_chat_models.py b/libs/partners/huggingface/tests/unit_tests/test_chat_models.py
index 7bd15685dd6..377c60a7539 100644
--- a/libs/partners/huggingface/tests/unit_tests/test_chat_models.py
+++ b/libs/partners/huggingface/tests/unit_tests/test_chat_models.py
@@ -23,6 +23,17 @@ from langchain_huggingface.llms import HuggingFaceEndpoint
 def mock_llm() -> Mock:
     llm = Mock(spec=HuggingFaceEndpoint)
     llm.inference_server_url = "test endpoint url"
+    llm.temperature = 0.7
+    llm.max_new_tokens = 512
+    llm.top_p = 0.9
+    llm.seed = 42
+    llm.streaming = True
+    llm.repetition_penalty = 1.1
+    llm.stop_sequences = ["", "<|end|>"]
+    llm.model_kwargs = {"do_sample": True, "top_k": 50}
+    llm.server_kwargs = {"timeout": 120}
+    llm.repo_id = "test/model"
+    llm.model = "test/model"
     return llm


@@ -209,3 +220,108 @@ def test_bind_tools(chat_hugging_face: Any) -> None:
     _, kwargs = mock_super_bind.call_args
     assert kwargs["tools"] == tools
     assert kwargs["tool_choice"] == "auto"
+
+
+def test_property_inheritance_integration(chat_hugging_face: Any) -> None:
+    """Test that ChatHuggingFace inherits params from LLM object."""
+    assert getattr(chat_hugging_face, "temperature", None) == 0.7
+    assert getattr(chat_hugging_face, "max_tokens", None) == 512
+    assert getattr(chat_hugging_face, "top_p", None) == 0.9
+    assert getattr(chat_hugging_face, "streaming", None) is True
+
+
+def test_default_params_includes_inherited_values(chat_hugging_face: Any) -> None:
+    """Test that _default_params includes inherited max_tokens from max_new_tokens."""
+    params = chat_hugging_face._default_params
+    assert params["max_tokens"] == 512  # inherited from LLM's max_new_tokens
+    assert params["temperature"] == 0.7  # inherited from LLM's temperature
+    assert params["stream"] is True  # inherited from LLM's streaming
+
+
+def test_create_message_dicts_includes_inherited_params(chat_hugging_face: Any) -> None:
+    """Test that _create_message_dicts includes inherited parameters in API call."""
+    messages = [HumanMessage(content="test message")]
+    message_dicts, params = chat_hugging_face._create_message_dicts(messages, None)
+
+    # Verify inherited parameters are included
+    assert params["max_tokens"] == 512
+    assert params["temperature"] == 0.7
+    assert params["stream"] is True
+
+    # Verify message conversion
+    assert len(message_dicts) == 1
+    assert message_dicts[0]["role"] == "user"
+    assert message_dicts[0]["content"] == "test message"
+
+
+def test_model_kwargs_inheritance(mock_llm: Any) -> None:
+    """Test that model_kwargs are inherited when not explicitly set."""
+    with patch(
+        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
+    ):
+        chat = ChatHuggingFace(llm=mock_llm)
+        assert chat.model_kwargs == {"do_sample": True, "top_k": 50}
+
+
+def test_huggingface_endpoint_specific_inheritance(mock_llm: Any) -> None:
+    """Test HuggingFaceEndpoint specific parameter inheritance."""
+    with (
+        patch(
+            "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
+        ),
+        patch(
+            "langchain_huggingface.chat_models.huggingface._is_huggingface_endpoint",
+            return_value=True,
+        ),
+    ):
+        chat = ChatHuggingFace(llm=mock_llm)
+        assert (
+            getattr(chat, "frequency_penalty", None) == 1.1
+        )  # from repetition_penalty
+
+
+def test_parameter_precedence_explicit_over_inherited(mock_llm: Any) -> None:
+    """Test that explicitly set parameters take precedence over inherited ones."""
+    with patch(
+        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
+    ):
+        # Explicitly set max_tokens to override inheritance
+        chat = ChatHuggingFace(llm=mock_llm, max_tokens=256, temperature=0.5)
+        assert chat.max_tokens == 256  # explicit value, not inherited 512
+        assert chat.temperature == 0.5  # explicit value, not inherited 0.7
+
+
+def test_inheritance_with_no_llm_properties(mock_llm: Any) -> None:
+    """Test inheritance when LLM doesn't have expected properties."""
+    # Remove some properties from mock
+    del mock_llm.temperature
+    del mock_llm.top_p
+
+    with patch(
+        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
+    ):
+        chat = ChatHuggingFace(llm=mock_llm)
+        # Should still inherit available properties
+        assert chat.max_tokens == 512  # max_new_tokens still available
+        # Missing properties should remain None/default
+        assert getattr(chat, "temperature", None) is None
+        assert getattr(chat, "top_p", None) is None
+
+
+def test_inheritance_with_empty_llm() -> None:
+    """Test that inheritance handles LLM with no relevant attributes gracefully."""
+    with patch(
+        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
+    ):
+        # Create a minimal mock LLM that passes validation but has no
+        # inheritance attributes
+        empty_llm = Mock(spec=HuggingFaceEndpoint)
+        empty_llm.repo_id = "test/model"
+        empty_llm.model = "test/model"
+        # Mock doesn't have the inheritance attributes by default
+
+
chat = ChatHuggingFace(llm=empty_llm) + # Properties should remain at their default values when LLM has no + # relevant attrs + assert chat.max_tokens is None + assert chat.temperature is None diff --git a/libs/partners/huggingface/uv.lock b/libs/partners/huggingface/uv.lock index 66ceb903a0e..ccbe221c367 100644 --- a/libs/partners/huggingface/uv.lock +++ b/libs/partners/huggingface/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10.0, <4.0.0" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -904,7 +904,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a5" +version = "1.0.3" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -938,6 +938,7 @@ test = [ { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" }, { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, { name = "grandalf", specifier = ">=0.8.0,<1.0.0" }, + { name = "langchain-model-profiles", directory = "../../model-profiles" }, { name = "langchain-tests", directory = "../../standard-tests" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, @@ -954,6 +955,7 @@ test = [ ] test-integration = [] typing = [ + { name = "langchain-model-profiles", directory = "../../model-profiles" }, { name = "langchain-text-splitters", directory = "../../text-splitters" }, { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, @@ -962,7 +964,7 @@ typing = [ [[package]] name = "langchain-huggingface" -version = "0.3.1" +version = "1.0.1" source = { editable = "." } dependencies = [ { name = "huggingface-hub" }, @@ -1040,7 +1042,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a1" +version = "1.0.1" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1735,10 +1737,10 @@ wheels = [ [[package]] name = "nvidia-nccl-cu12" -version = "2.27.3" +version = "2.27.5" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/5b/4e4fff7bad39adf89f735f2bc87248c81db71205b62bcc0d5ca5b606b3c3/nvidia_nccl_cu12-2.27.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adf27ccf4238253e0b826bce3ff5fa532d65fc42322c8bfdfaf28024c0fbe039", size = 322364134, upload-time = "2025-06-03T21:58:04.013Z" }, + { url = "https://files.pythonhosted.org/packages/6e/89/f7a07dc961b60645dbbf42e80f2bc85ade7feb9a491b11a1e973aa00071f/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ad730cf15cb5d25fe849c6e6ca9eb5b76db16a80f13f425ac68d8e2e55624457", size = 322348229, upload-time = "2025-06-26T04:11:28.385Z" }, ] [[package]] @@ -1749,6 +1751,14 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, ] +[[package]] +name = "nvidia-nvshmem-cu12" +version = "3.3.20" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145, upload-time = "2025-08-04T20:25:19.995Z" }, +] + [[package]] name = "nvidia-nvtx-cu12" version = "12.8.90" @@ -1836,42 +1846,57 @@ wheels = [ [[package]] name = "ormsgpack" -version = "1.10.0" +version = "1.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/36/44eed5ef8ce93cded76a576780bab16425ce7876f10d3e2e6265e46c21ea/ormsgpack-1.10.0.tar.gz", hash = "sha256:7f7a27efd67ef22d7182ec3b7fa7e9d147c3ad9be2a24656b23c989077e08b16", size = 58629, upload-time = "2025-05-24T19:07:53.944Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/f8/224c342c0e03e131aaa1a1f19aa2244e167001783a433f4eed10eedd834b/ormsgpack-1.11.0.tar.gz", hash = "sha256:7c9988e78fedba3292541eb3bb274fa63044ef4da2ddb47259ea70c05dee4206", size = 49357, upload-time = "2025-10-08T17:29:15.621Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/74/c2dd5daf069e3798d09d5746000f9b210de04df83834e5cb47f0ace51892/ormsgpack-1.10.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8a52c7ce7659459f3dc8dec9fd6a6c76f855a0a7e2b61f26090982ac10b95216", size = 376280, upload-time = "2025-05-24T19:06:51.3Z" }, - { url = "https://files.pythonhosted.org/packages/78/7b/30ff4bffb709e8a242005a8c4d65714fd96308ad640d31cff1b85c0d8cc4/ormsgpack-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:060f67fe927582f4f63a1260726d019204b72f460cf20930e6c925a1d129f373", size = 204335, upload-time = "2025-05-24T19:06:53.442Z" }, - { url = "https://files.pythonhosted.org/packages/8f/3f/c95b7d142819f801a0acdbd04280e8132e43b6e5a8920173e8eb92ea0e6a/ormsgpack-1.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7058ef6092f995561bf9f71d6c9a4da867b6cc69d2e94cb80184f579a3ceed5", size = 215373, upload-time = "2025-05-24T19:06:55.153Z" }, - { url = "https://files.pythonhosted.org/packages/ef/1a/e30f4bcf386db2015d1686d1da6110c95110294d8ea04f86091dd5eb3361/ormsgpack-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6f3509c1b0e51b15552d314b1d409321718122e90653122ce4b997f01453a", size = 216469, upload-time = "2025-05-24T19:06:56.555Z" }, - { url = "https://files.pythonhosted.org/packages/96/fc/7e44aeade22b91883586f45b7278c118fd210834c069774891447f444fc9/ormsgpack-1.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:51c1edafd5c72b863b1f875ec31c529f09c872a5ff6fe473b9dfaf188ccc3227", size = 384590, upload-time = "2025-05-24T19:06:58.286Z" }, - { url = "https://files.pythonhosted.org/packages/ec/78/f92c24e8446697caa83c122f10b6cf5e155eddf81ce63905c8223a260482/ormsgpack-1.10.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c780b44107a547a9e9327270f802fa4d6b0f6667c9c03c3338c0ce812259a0f7", size = 478891, upload-time = "2025-05-24T19:07:00.126Z" }, - { url = "https://files.pythonhosted.org/packages/5a/75/87449690253c64bea2b663c7c8f2dbc9ad39d73d0b38db74bdb0f3947b16/ormsgpack-1.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:137aab0d5cdb6df702da950a80405eb2b7038509585e32b4e16289604ac7cb84", size = 390121, upload-time = "2025-05-24T19:07:01.777Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/cc/c83257faf3a5169ec29dd87121317a25711da9412ee8c1e82f2e1a00c0be/ormsgpack-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:3e666cb63030538fa5cd74b1e40cb55b6fdb6e2981f024997a288bf138ebad07", size = 121196, upload-time = "2025-05-24T19:07:03.47Z" }, - { url = "https://files.pythonhosted.org/packages/30/27/7da748bc0d7d567950a378dee5a32477ed5d15462ab186918b5f25cac1ad/ormsgpack-1.10.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4bb7df307e17b36cbf7959cd642c47a7f2046ae19408c564e437f0ec323a7775", size = 376275, upload-time = "2025-05-24T19:07:05.128Z" }, - { url = "https://files.pythonhosted.org/packages/7b/65/c082cc8c74a914dbd05af0341c761c73c3d9960b7432bbf9b8e1e20811af/ormsgpack-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8817ae439c671779e1127ee62f0ac67afdeaeeacb5f0db45703168aa74a2e4af", size = 204335, upload-time = "2025-05-24T19:07:06.423Z" }, - { url = "https://files.pythonhosted.org/packages/46/62/17ef7e5d9766c79355b9c594cc9328c204f1677bc35da0595cc4e46449f0/ormsgpack-1.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f345f81e852035d80232e64374d3a104139d60f8f43c6c5eade35c4bac5590e", size = 215372, upload-time = "2025-05-24T19:07:08.149Z" }, - { url = "https://files.pythonhosted.org/packages/4e/92/7c91e8115fc37e88d1a35e13200fda3054ff5d2e5adf017345e58cea4834/ormsgpack-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21de648a1c7ef692bdd287fb08f047bd5371d7462504c0a7ae1553c39fee35e3", size = 216470, upload-time = "2025-05-24T19:07:09.903Z" }, - { url = "https://files.pythonhosted.org/packages/2c/86/ce053c52e2517b90e390792d83e926a7a523c1bce5cc63d0a7cd05ce6cf6/ormsgpack-1.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3a7d844ae9cbf2112c16086dd931b2acefce14cefd163c57db161170c2bfa22b", size = 384591, upload-time = "2025-05-24T19:07:11.24Z" }, - { url = "https://files.pythonhosted.org/packages/07/e8/2ad59f2ab222c6029e500bc966bfd2fe5cb099f8ab6b7ebeb50ddb1a6fe5/ormsgpack-1.10.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e4d80585403d86d7f800cf3d0aafac1189b403941e84e90dd5102bb2b92bf9d5", size = 478892, upload-time = "2025-05-24T19:07:13.147Z" }, - { url = "https://files.pythonhosted.org/packages/f4/73/f55e4b47b7b18fd8e7789680051bf830f1e39c03f1d9ed993cd0c3e97215/ormsgpack-1.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da1de515a87e339e78a3ccf60e39f5fb740edac3e9e82d3c3d209e217a13ac08", size = 390122, upload-time = "2025-05-24T19:07:14.557Z" }, - { url = "https://files.pythonhosted.org/packages/f7/87/073251cdb93d4c6241748568b3ad1b2a76281fb2002eed16a3a4043d61cf/ormsgpack-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:57c4601812684024132cbb32c17a7d4bb46ffc7daf2fddf5b697391c2c4f142a", size = 121197, upload-time = "2025-05-24T19:07:15.981Z" }, - { url = "https://files.pythonhosted.org/packages/99/95/f3ab1a7638f6aa9362e87916bb96087fbbc5909db57e19f12ad127560e1e/ormsgpack-1.10.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4e159d50cd4064d7540e2bc6a0ab66eab70b0cc40c618b485324ee17037527c0", size = 376806, upload-time = "2025-05-24T19:07:17.221Z" }, - { url = "https://files.pythonhosted.org/packages/6c/2b/42f559f13c0b0f647b09d749682851d47c1a7e48308c43612ae6833499c8/ormsgpack-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb47c85f3a866e29279d801115b554af0fefc409e2ed8aa90aabfa77efe5cc6", size = 
204433, upload-time = "2025-05-24T19:07:18.569Z" }, - { url = "https://files.pythonhosted.org/packages/45/42/1ca0cb4d8c80340a89a4af9e6d8951fb8ba0d076a899d2084eadf536f677/ormsgpack-1.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c28249574934534c9bd5dce5485c52f21bcea0ee44d13ece3def6e3d2c3798b5", size = 215547, upload-time = "2025-05-24T19:07:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/0a/38/184a570d7c44c0260bc576d1daaac35b2bfd465a50a08189518505748b9a/ormsgpack-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1957dcadbb16e6a981cd3f9caef9faf4c2df1125e2a1b702ee8236a55837ce07", size = 216746, upload-time = "2025-05-24T19:07:21.83Z" }, - { url = "https://files.pythonhosted.org/packages/69/2f/1aaffd08f6b7fdc2a57336a80bdfb8df24e6a65ada5aa769afecfcbc6cc6/ormsgpack-1.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3b29412558c740bf6bac156727aa85ac67f9952cd6f071318f29ee72e1a76044", size = 384783, upload-time = "2025-05-24T19:07:23.674Z" }, - { url = "https://files.pythonhosted.org/packages/a9/63/3e53d6f43bb35e00c98f2b8ab2006d5138089ad254bc405614fbf0213502/ormsgpack-1.10.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6933f350c2041ec189fe739f0ba7d6117c8772f5bc81f45b97697a84d03020dd", size = 479076, upload-time = "2025-05-24T19:07:25.047Z" }, - { url = "https://files.pythonhosted.org/packages/b8/19/fa1121b03b61402bb4d04e35d164e2320ef73dfb001b57748110319dd014/ormsgpack-1.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a86de06d368fcc2e58b79dece527dc8ca831e0e8b9cec5d6e633d2777ec93d0", size = 390447, upload-time = "2025-05-24T19:07:26.568Z" }, - { url = "https://files.pythonhosted.org/packages/b0/0d/73143ecb94ac4a5dcba223402139240a75dee0cc6ba8a543788a5646407a/ormsgpack-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:35fa9f81e5b9a0dab42e09a73f7339ecffdb978d6dbf9deb2ecf1e9fc7808722", size = 121401, upload-time = "2025-05-24T19:07:28.308Z" }, - { url = "https://files.pythonhosted.org/packages/61/f8/ec5f4e03268d0097545efaab2893aa63f171cf2959cb0ea678a5690e16a1/ormsgpack-1.10.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d816d45175a878993b7372bd5408e0f3ec5a40f48e2d5b9d8f1cc5d31b61f1f", size = 376806, upload-time = "2025-05-24T19:07:29.555Z" }, - { url = "https://files.pythonhosted.org/packages/c1/19/b3c53284aad1e90d4d7ed8c881a373d218e16675b8b38e3569d5b40cc9b8/ormsgpack-1.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90345ccb058de0f35262893751c603b6376b05f02be2b6f6b7e05d9dd6d5643", size = 204433, upload-time = "2025-05-24T19:07:30.977Z" }, - { url = "https://files.pythonhosted.org/packages/09/0b/845c258f59df974a20a536c06cace593698491defdd3d026a8a5f9b6e745/ormsgpack-1.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144b5e88f1999433e54db9d637bae6fe21e935888be4e3ac3daecd8260bd454e", size = 215549, upload-time = "2025-05-24T19:07:32.345Z" }, - { url = "https://files.pythonhosted.org/packages/61/56/57fce8fb34ca6c9543c026ebebf08344c64dbb7b6643d6ddd5355d37e724/ormsgpack-1.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2190b352509d012915921cca76267db136cd026ddee42f1b0d9624613cc7058c", size = 216747, upload-time = "2025-05-24T19:07:34.075Z" }, - { url = "https://files.pythonhosted.org/packages/b8/3f/655b5f6a2475c8d209f5348cfbaaf73ce26237b92d79ef2ad439407dd0fa/ormsgpack-1.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:86fd9c1737eaba43d3bb2730add9c9e8b5fbed85282433705dd1b1e88ea7e6fb", size = 384785, upload-time = "2025-05-24T19:07:35.83Z" }, - { url = "https://files.pythonhosted.org/packages/4b/94/687a0ad8afd17e4bce1892145d6a1111e58987ddb176810d02a1f3f18686/ormsgpack-1.10.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:33afe143a7b61ad21bb60109a86bb4e87fec70ef35db76b89c65b17e32da7935", size = 479076, upload-time = "2025-05-24T19:07:37.533Z" }, - { url = "https://files.pythonhosted.org/packages/c8/34/68925232e81e0e062a2f0ac678f62aa3b6f7009d6a759e19324dbbaebae7/ormsgpack-1.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f23d45080846a7b90feabec0d330a9cc1863dc956728412e4f7986c80ab3a668", size = 390446, upload-time = "2025-05-24T19:07:39.469Z" }, - { url = "https://files.pythonhosted.org/packages/12/ad/f4e1a36a6d1714afb7ffb74b3ababdcb96529cf4e7a216f9f7c8eda837b6/ormsgpack-1.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:534d18acb805c75e5fba09598bf40abe1851c853247e61dda0c01f772234da69", size = 121399, upload-time = "2025-05-24T19:07:40.854Z" }, + { url = "https://files.pythonhosted.org/packages/ff/3d/6996193cb2babc47fc92456223bef7d141065357ad4204eccf313f47a7b3/ormsgpack-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:03d4e658dd6e1882a552ce1d13cc7b49157414e7d56a4091fbe7823225b08cba", size = 367965, upload-time = "2025-10-08T17:28:06.736Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/c83b805dd9caebb046f4ceeed3706d0902ed2dbbcf08b8464e89f2c52e05/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67eb913c2b703f0ed39607fc56e50724dd41f92ce080a586b4d6149eb3fe4", size = 195209, upload-time = "2025-10-08T17:28:08.395Z" }, + { url = "https://files.pythonhosted.org/packages/3a/17/427d9c4f77b120f0af01d7a71d8144771c9388c2a81f712048320e31353b/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e54175b92411f73a238e5653a998627f6660de3def37d9dd7213e0fd264ca56", size = 205868, upload-time = "2025-10-08T17:28:09.688Z" }, + { url = "https://files.pythonhosted.org/packages/82/32/a9ce218478bdbf3fee954159900e24b314ab3064f7b6a217ccb1e3464324/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca2b197f4556e1823d1319869d4c5dc278be335286d2308b0ed88b59a5afcc25", size = 207391, upload-time = "2025-10-08T17:28:11.031Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d3/4413fe7454711596fdf08adabdfa686580e4656702015108e4975f00a022/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc62388262f58c792fe1e450e1d9dbcc174ed2fb0b43db1675dd7c5ff2319d6a", size = 377078, upload-time = "2025-10-08T17:28:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ad/13fae555a45e35ca1ca929a27c9ee0a3ecada931b9d44454658c543f9b9c/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c48bc10af74adfbc9113f3fb160dc07c61ad9239ef264c17e449eba3de343dc2", size = 470776, upload-time = "2025-10-08T17:28:13.484Z" }, + { url = "https://files.pythonhosted.org/packages/36/60/51178b093ffc4e2ef3381013a67223e7d56224434fba80047249f4a84b26/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a608d3a1d4fa4acdc5082168a54513cff91f47764cef435e81a483452f5f7647", size = 380862, upload-time = "2025-10-08T17:28:14.747Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e3/1cb6c161335e2ae7d711ecfb007a31a3936603626e347c13e5e53b7c7cf8/ormsgpack-1.11.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:97217b4f7f599ba45916b9c4c4b1d5656e8e2a4d91e2e191d72a7569d3c30923", size = 112058, upload-time = "2025-10-08T17:28:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/90164d00e8e94b48eff8a17bc2f4be6b71ae356a00904bc69d5e8afe80fb/ormsgpack-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c7be823f47d8e36648d4bc90634b93f02b7d7cc7480081195f34767e86f181fb", size = 367964, upload-time = "2025-10-08T17:28:16.778Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c2/fb6331e880a3446c1341e72c77bd5a46da3e92a8e2edf7ea84a4c6c14fff/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68accf15d1b013812755c0eb7a30e1fc2f81eb603a1a143bf0cda1b301cfa797", size = 195209, upload-time = "2025-10-08T17:28:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/50/4943fb5df8cc02da6b7b1ee2c2a7fb13aebc9f963d69280b1bb02b1fb178/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:805d06fb277d9a4e503c0c707545b49cde66cbb2f84e5cf7c58d81dfc20d8658", size = 205869, upload-time = "2025-10-08T17:28:19.01Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/e7e06835bfea9adeef43915143ce818098aecab0cbd3df584815adf3e399/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1e57cdf003e77acc43643bda151dc01f97147a64b11cdee1380bb9698a7601c", size = 207391, upload-time = "2025-10-08T17:28:20.352Z" }, + { url = "https://files.pythonhosted.org/packages/33/f0/f28a19e938a14ec223396e94f4782fbcc023f8c91f2ab6881839d3550f32/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:37fc05bdaabd994097c62e2f3e08f66b03f856a640ede6dc5ea340bd15b77f4d", size = 377081, upload-time = "2025-10-08T17:28:21.926Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e3/73d1d7287637401b0b6637e30ba9121e1aa1d9f5ea185ed9834ca15d512c/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a6e9db6c73eb46b2e4d97bdffd1368a66f54e6806b563a997b19c004ef165e1d", size = 470779, upload-time = "2025-10-08T17:28:22.993Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/7ba7f9721e766dd0dfe4cedf444439447212abffe2d2f4538edeeec8ccbd/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9c44eae5ac0196ffc8b5ed497c75511056508f2303fa4d36b208eb820cf209e", size = 380865, upload-time = "2025-10-08T17:28:24.012Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7d/bb92a0782bbe0626c072c0320001410cf3f6743ede7dc18f034b1a18edef/ormsgpack-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:11d0dfaf40ae7c6de4f7dbd1e4892e2e6a55d911ab1774357c481158d17371e4", size = 112058, upload-time = "2025-10-08T17:28:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/28/1a/f07c6f74142815d67e1d9d98c5b2960007100408ade8242edac96d5d1c73/ormsgpack-1.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:0c63a3f7199a3099c90398a1bdf0cb577b06651a442dc5efe67f2882665e5b02", size = 105894, upload-time = "2025-10-08T17:28:25.93Z" }, + { url = "https://files.pythonhosted.org/packages/1e/16/2805ebfb3d2cbb6c661b5fae053960fc90a2611d0d93e2207e753e836117/ormsgpack-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3434d0c8d67de27d9010222de07fb6810fb9af3bb7372354ffa19257ac0eb83b", size = 368474, upload-time = "2025-10-08T17:28:27.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/39/6afae47822dca0ce4465d894c0bbb860a850ce29c157882dbdf77a5dd26e/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2da5bd097e8dbfa4eb0d4ccfe79acd6f538dee4493579e2debfe4fc8f4ca89b", size = 195321, upload-time = "2025-10-08T17:28:28.573Z" }, + { url = "https://files.pythonhosted.org/packages/f6/54/11eda6b59f696d2f16de469bfbe539c9f469c4b9eef5a513996b5879c6e9/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fdbaa0a5a8606a486960b60c24f2d5235d30ac7a8b98eeaea9854bffef14dc3d", size = 206036, upload-time = "2025-10-08T17:28:29.785Z" }, + { url = "https://files.pythonhosted.org/packages/1e/86/890430f704f84c4699ddad61c595d171ea2fd77a51fbc106f83981e83939/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3682f24f800c1837017ee90ce321086b2cbaef88db7d4cdbbda1582aa6508159", size = 207615, upload-time = "2025-10-08T17:28:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b9/77383e16c991c0ecb772205b966fc68d9c519e0b5f9c3913283cbed30ffe/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fcca21202bb05ccbf3e0e92f560ee59b9331182e4c09c965a28155efbb134993", size = 377195, upload-time = "2025-10-08T17:28:32.436Z" }, + { url = "https://files.pythonhosted.org/packages/20/e2/15f9f045d4947f3c8a5e0535259fddf027b17b1215367488b3565c573b9d/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c30e5c4655ba46152d722ec7468e8302195e6db362ec1ae2c206bc64f6030e43", size = 470960, upload-time = "2025-10-08T17:28:33.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/403ce188c4c495bc99dff921a0ad3d9d352dd6d3c4b629f3638b7f0cf79b/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7138a341f9e2c08c59368f03d3be25e8b87b3baaf10d30fb1f6f6b52f3d47944", size = 381174, upload-time = "2025-10-08T17:28:34.781Z" }, + { url = "https://files.pythonhosted.org/packages/14/a8/94c94bc48c68da4374870a851eea03fc5a45eb041182ad4c5ed9acfc05a4/ormsgpack-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4bd8589b78a11026d47f4edf13c1ceab9088bb12451f34396afe6497db28a27", size = 112314, upload-time = "2025-10-08T17:28:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/19/d0/aa4cf04f04e4cc180ce7a8d8ddb5a7f3af883329cbc59645d94d3ba157a5/ormsgpack-1.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:e5e746a1223e70f111d4001dab9585ac8639eee8979ca0c8db37f646bf2961da", size = 106072, upload-time = "2025-10-08T17:28:37.518Z" }, + { url = "https://files.pythonhosted.org/packages/8b/35/e34722edb701d053cf2240f55974f17b7dbfd11fdef72bd2f1835bcebf26/ormsgpack-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e7b36ab7b45cb95217ae1f05f1318b14a3e5ef73cb00804c0f06233f81a14e8", size = 368502, upload-time = "2025-10-08T17:28:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6a/c2fc369a79d6aba2aa28c8763856c95337ac7fcc0b2742185cd19397212a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43402d67e03a9a35cc147c8c03f0c377cad016624479e1ee5b879b8425551484", size = 195344, upload-time = "2025-10-08T17:28:39.554Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6a/0f8e24b7489885534c1a93bdba7c7c434b9b8638713a68098867db9f254c/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:64fd992f932764d6306b70ddc755c1bc3405c4c6a69f77a36acf7af1c8f5ada4", size = 206045, 
upload-time = "2025-10-08T17:28:40.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/71/8b460ba264f3c6f82ef5b1920335720094e2bd943057964ce5287d6df83a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0362fb7fe4a29c046c8ea799303079a09372653a1ce5a5a588f3bbb8088368d0", size = 207641, upload-time = "2025-10-08T17:28:41.736Z" }, + { url = "https://files.pythonhosted.org/packages/50/cf/f369446abaf65972424ed2651f2df2b7b5c3b735c93fc7fa6cfb81e34419/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:de2f7a65a9d178ed57be49eba3d0fc9b833c32beaa19dbd4ba56014d3c20b152", size = 377211, upload-time = "2025-10-08T17:28:43.12Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3f/948bb0047ce0f37c2efc3b9bb2bcfdccc61c63e0b9ce8088d4903ba39dcf/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f38cfae95461466055af966fc922d06db4e1654966385cda2828653096db34da", size = 470973, upload-time = "2025-10-08T17:28:44.465Z" }, + { url = "https://files.pythonhosted.org/packages/31/a4/92a8114d1d017c14aaa403445060f345df9130ca532d538094f38e535988/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c88396189d238f183cea7831b07a305ab5c90d6d29b53288ae11200bd956357b", size = 381161, upload-time = "2025-10-08T17:28:46.063Z" }, + { url = "https://files.pythonhosted.org/packages/d0/64/5b76447da654798bfcfdfd64ea29447ff2b7f33fe19d0e911a83ad5107fc/ormsgpack-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:5403d1a945dd7c81044cebeca3f00a28a0f4248b33242a5d2d82111628043725", size = 112321, upload-time = "2025-10-08T17:28:47.393Z" }, + { url = "https://files.pythonhosted.org/packages/46/5e/89900d06db9ab81e7ec1fd56a07c62dfbdcda398c435718f4252e1dc52a0/ormsgpack-1.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c57357b8d43b49722b876edf317bdad9e6d52071b523fdd7394c30cd1c67d5a0", size = 106084, upload-time = "2025-10-08T17:28:48.305Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0b/c659e8657085c8c13f6a0224789f422620cef506e26573b5434defe68483/ormsgpack-1.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d390907d90fd0c908211592c485054d7a80990697ef4dff4e436ac18e1aab98a", size = 368497, upload-time = "2025-10-08T17:28:49.297Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0e/451e5848c7ed56bd287e8a2b5cb5926e54466f60936e05aec6cb299f9143/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6153c2e92e789509098e04c9aa116b16673bd88ec78fbe0031deeb34ab642d10", size = 195385, upload-time = "2025-10-08T17:28:50.314Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/90f78cbbe494959f2439c2ec571f08cd3464c05a6a380b0d621c622122a9/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2b2c2a065a94d742212b2018e1fecd8f8d72f3c50b53a97d1f407418093446d", size = 206114, upload-time = "2025-10-08T17:28:51.336Z" }, + { url = "https://files.pythonhosted.org/packages/fb/db/34163f4c0923bea32dafe42cd878dcc66795a3e85669bc4b01c1e2b92a7b/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110e65b5340f3d7ef8b0009deae3c6b169437e6b43ad5a57fd1748085d29d2ac", size = 207679, upload-time = "2025-10-08T17:28:53.627Z" }, + { url = "https://files.pythonhosted.org/packages/b6/14/04ee741249b16f380a9b4a0cc19d4134d0b7c74bab27a2117da09e525eb9/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c27e186fca96ab34662723e65b420919910acbbc50fc8e1a44e08f26268cb0e0", size = 377237, upload-time = "2025-10-08T17:28:56.12Z" }, + { url = "https://files.pythonhosted.org/packages/89/ff/53e588a6aaa833237471caec679582c2950f0e7e1a8ba28c1511b465c1f4/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d56b1f877c13d499052d37a3db2378a97d5e1588d264f5040b3412aee23d742c", size = 471021, upload-time = "2025-10-08T17:28:57.299Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f9/f20a6d9ef2be04da3aad05e8f5699957e9a30c6d5c043a10a296afa7e890/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c88e28cd567c0a3269f624b4ade28142d5e502c8e826115093c572007af5be0a", size = 381205, upload-time = "2025-10-08T17:28:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/64/96c07d084b479ac8b7821a77ffc8d3f29d8b5c95ebfdf8db1c03dff02762/ormsgpack-1.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:8811160573dc0a65f62f7e0792c4ca6b7108dfa50771edb93f9b84e2d45a08ae", size = 112374, upload-time = "2025-10-08T17:29:00Z" }, + { url = "https://files.pythonhosted.org/packages/88/a5/5dcc18b818d50213a3cadfe336bb6163a102677d9ce87f3d2f1a1bee0f8c/ormsgpack-1.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:23e30a8d3c17484cf74e75e6134322255bd08bc2b5b295cc9c442f4bae5f3c2d", size = 106056, upload-time = "2025-10-08T17:29:01.29Z" }, + { url = "https://files.pythonhosted.org/packages/19/2b/776d1b411d2be50f77a6e6e94a25825cca55dcacfe7415fd691a144db71b/ormsgpack-1.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2905816502adfaf8386a01dd85f936cd378d243f4f5ee2ff46f67f6298dc90d5", size = 368661, upload-time = "2025-10-08T17:29:02.382Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0c/81a19e6115b15764db3d241788f9fac093122878aaabf872cc545b0c4650/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c04402fb9a0a9b9f18fbafd6d5f8398ee99b3ec619fb63952d3a954bc9d47daa", size = 195539, upload-time = "2025-10-08T17:29:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/97/86/e5b50247a61caec5718122feb2719ea9d451d30ac0516c288c1dbc6408e8/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a025ec07ac52056ecfd9e57b5cbc6fff163f62cb9805012b56cda599157f8ef2", size = 207718, upload-time = "2025-10-08T17:29:04.545Z" }, ] [[package]] @@ -2188,7 +2213,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.12.0a1" +version = "2.12.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -2196,113 +2221,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/e0/1a4cb54fbbb606f3b19fc0f5a3c8776902c49da847c32546674644cfa212/pydantic-2.12.0a1.tar.gz", hash = "sha256:5724048f8b728a721f993de95cadf6ef47ce18043034316798aa5acbab30f7ce", size = 802751, upload-time = "2025-07-26T18:28:55.313Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/35/d319ed522433215526689bad428a94058b6dd12190ce7ddd78618ac14b28/pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd", size = 816358, upload-time = "2025-10-14T15:02:21.842Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/6c/e7f4ff7094ea277cc164c56325cf903a93312655f5d1ac68d7d5dd10facc/pydantic-2.12.0a1-py3-none-any.whl", hash = 
"sha256:f292f0df096a522be36558f1c569d840b27de8ad64234996b068b891c4bc99e7", size = 453772, upload-time = "2025-07-26T18:28:52.799Z" }, + { url = "https://files.pythonhosted.org/packages/6c/98/468cb649f208a6f1279448e6e5247b37ae79cf5e4041186f1e2ef3d16345/pydantic-2.12.2-py3-none-any.whl", hash = "sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae", size = 460628, upload-time = "2025-10-14T15:02:19.623Z" }, ] [[package]] name = "pydantic-core" -version = "2.37.2" +version = "2.41.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e6/e3/533de0eacb89752c7536bd6551f622e70953811d4cf33f16765bc768e053/pydantic_core-2.37.2.tar.gz", hash = "sha256:78fb2a749123408fedaf540a22ca6bf0b5ec1f522a14fc00e27ede33d8ac088c", size = 443903, upload-time = "2025-07-26T11:30:10.053Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/f7/648935bc606e9b8a1f961c7f31d0830744e77d8babf09b1071c9f56bb31a/pydantic_core-2.37.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5fe49144cee5f930dbfe9df3d4aaa4ed7dd6bd64cc63096187dace92e5fd2c4e", size = 2113876, upload-time = "2025-07-26T11:26:49.044Z" }, - { url = "https://files.pythonhosted.org/packages/21/ab/0d05135c6bf31052d995c0c06eb9d1f9a55726cf6baec1480f44e190095e/pydantic_core-2.37.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:49e16fa24b0015b5eadfb54cf5ea544a17de618981e5b4d4f4437702e433c11c", size = 1880791, upload-time = "2025-07-26T11:26:50.796Z" }, - { url = "https://files.pythonhosted.org/packages/1a/83/0869c2de4cdf4d0f90b06a9384ffb7e73f667987ae3893ee3a94201560c7/pydantic_core-2.37.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fc32e7e60f2feb2c564079a44e97f4e50be22a52a88fd7eb1c7df4b4d8012db", size = 1964403, upload-time = "2025-07-26T11:26:52.482Z" }, - { url = "https://files.pythonhosted.org/packages/90/20/44f9aafede5c761b979b5d0a79bcca32818fd3b8dbba9f1780f0ff9098ed/pydantic_core-2.37.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:19d804119aa9889bf18d86f8308b87b2dfbec758eb42fdea3094eed7d29242df", size = 2041009, upload-time = "2025-07-26T11:26:53.96Z" }, - { url = "https://files.pythonhosted.org/packages/46/e7/0f53c8e4f3f8b5de93bf584a3e3f0c352046d4262da0777c987102991e3c/pydantic_core-2.37.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:188ef23ea5341c27d307badf9f14caebba36720cb24e1d67b7bfffbee0f36bb4", size = 2228327, upload-time = "2025-07-26T11:26:55.721Z" }, - { url = "https://files.pythonhosted.org/packages/69/3e/43887670f9883a661e97d64decb80214773e77d3d99cec80a01428f3c1de/pydantic_core-2.37.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c066b2f6424e2837d0855131cba3cb2bb6c5229da978f37fc301f577fac74970", size = 2305599, upload-time = "2025-07-26T11:26:57.05Z" }, - { url = "https://files.pythonhosted.org/packages/b0/cc/668e8f22df0a95e3f401acdf6d24954e5238c1d27a75c770fe3cf503d8e4/pydantic_core-2.37.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd3a3680429ba5fdb23f2682188a2acb166a9b96cbecbb1c9a5338acfcdcd6f8", size = 2036594, upload-time = "2025-07-26T11:26:58.736Z" }, - 
{ url = "https://files.pythonhosted.org/packages/c3/15/46f5573f54f5c037274def1da80b6258813c6c684f1d3dea94f863da5de7/pydantic_core-2.37.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d22cd58bb142164600fbfdaca5a1cc7a76e636e9b29a41a027dfeb2a175fe6b", size = 2179568, upload-time = "2025-07-26T11:27:00.589Z" }, - { url = "https://files.pythonhosted.org/packages/a4/f9/3a93af02eff8df724180b20ca34b20c57e85ad7659c4b9c4ed7a75887958/pydantic_core-2.37.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:15effd30feb00b341cf84b53a14402ecf1177ddf24e940defbacc066bf9b3294", size = 2141727, upload-time = "2025-07-26T11:27:03.2Z" }, - { url = "https://files.pythonhosted.org/packages/bd/4a/7e4c5ddaada092918329cbe3f47c91cf4ca73bc4979cc74e1fa406639f57/pydantic_core-2.37.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:0cdcc3ba7eac65f0cda1aea9da09681c14dc7b052d1028b291efda1710b11a04", size = 2308547, upload-time = "2025-07-26T11:27:05.027Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/05fec5271a891ed026ffeec99dfc5b3b2baed2baf7c4f6dd006f7f4250ec/pydantic_core-2.37.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:807e2a50fc1c99963a2e33490fa0a56b04e47a31534a6389921be9f3fad39a97", size = 2320709, upload-time = "2025-07-26T11:27:06.479Z" }, - { url = "https://files.pythonhosted.org/packages/e9/17/d60b349bf5f5b093ddef6ea2bd838fb09a062013c8d8b33146c9397fc309/pydantic_core-2.37.2-cp310-cp310-win32.whl", hash = "sha256:11349ab788fd56428d01ec0dc0a34d79f97dddc17727875259c9da357b8e968f", size = 1967637, upload-time = "2025-07-26T11:27:07.943Z" }, - { url = "https://files.pythonhosted.org/packages/f4/1a/99aaaef3b6f7d414a0384c2b9c3c9cee44146de442f102ad57ddd29d9e60/pydantic_core-2.37.2-cp310-cp310-win_amd64.whl", hash = "sha256:205257f8f14febda5af04c15d20018ada14ec082486ab9f90510937bdc99089b", size = 1995602, upload-time = "2025-07-26T11:27:09.575Z" }, - { url = "https://files.pythonhosted.org/packages/66/db/e43fc93054e40490dadee46fdad516f0336bd58f74e35849391f8385c14b/pydantic_core-2.37.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f7fd8347b1dbdcd529175c5a9dcb131e964e581bbf4985a2edd184ce4189f8a", size = 2113435, upload-time = "2025-07-26T11:27:11.066Z" }, - { url = "https://files.pythonhosted.org/packages/a3/d2/b777f106c2dafdf5a4320a32be5445873c11c62f57c1d398bcdcff4d9899/pydantic_core-2.37.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce36ad8ed3044dc7f4e1c023b409ba619f1e4b01c3dff2089d15d58ff1be6e85", size = 1878915, upload-time = "2025-07-26T11:27:12.847Z" }, - { url = "https://files.pythonhosted.org/packages/43/65/c910aab41b0d6dc9111322dd005d0c3b02681ccb2fb8fe0998425dbb5897/pydantic_core-2.37.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95458d6ffd174baffa92d1de7f554a5bd241d01ece213610549454df85246977", size = 1964754, upload-time = "2025-07-26T11:27:14.637Z" }, - { url = "https://files.pythonhosted.org/packages/31/4f/8adb11256c145878cfd048b195d829ae028cd6ddd6bf54ad5f2ddd6bc009/pydantic_core-2.37.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:69ca4ad75a16d133418585487a1f411d9e108babfcf16929a1a80e7749679ec5", size = 2042033, upload-time = "2025-07-26T11:27:16.437Z" }, - { url = "https://files.pythonhosted.org/packages/a0/59/5632eacb0a67484972651eba2d2a92d8c3cebfec8b5cdd11929e156e15d5/pydantic_core-2.37.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:471fdd3ce9b6c64ebec530297b89b0bff79f6a09ed8a64da7546c6bcf652fa62", size = 2228995, upload-time = 
"2025-07-26T11:27:18.012Z" }, - { url = "https://files.pythonhosted.org/packages/16/83/cce6ca6e70cfb595500ceb525d8a5ff7a5a9b6816fd6c8a108ebd7aa3ac3/pydantic_core-2.37.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b3e5c3916f052fa215eaf790dbec963fec112670cdb6b5043023d422c056b5c", size = 2304893, upload-time = "2025-07-26T11:27:19.542Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4e/4003a1096f852c3b4614990aacb1fcec696e6db9ce929d7dfab4466cf644/pydantic_core-2.37.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:673fbca1f76e85172e5a73f2031e281da3a509bb61721e98bf8121893c7c2f9a", size = 2034951, upload-time = "2025-07-26T11:27:21.033Z" }, - { url = "https://files.pythonhosted.org/packages/08/18/9464484912996b5fc6485d5eaa4afed9d51e1b7d95c934e3b8d2bb0d4327/pydantic_core-2.37.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:86173c14c855c7f1ebbdd8a446a985ab829c547206f5a7e6e88ff5571b9de147", size = 2179993, upload-time = "2025-07-26T11:27:22.852Z" }, - { url = "https://files.pythonhosted.org/packages/c2/67/fbe323eea3503e102b807cf5205f9282dc3bed6aa3433184cdf3e9b67bc3/pydantic_core-2.37.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:809212fa54f5940c3658cad730435218c6adfcaadb3b2c7edf4e0ef2dae56cc9", size = 2141531, upload-time = "2025-07-26T11:27:24.428Z" }, - { url = "https://files.pythonhosted.org/packages/d9/8e/8760eb01795c3d1c75e0df7c050e376c8b642fe6b6d75a1700c14f38aed6/pydantic_core-2.37.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:6a99ac69ae60b7e4425162a22342687274133f4ac42df74a8d542f2a10d4a636", size = 2309673, upload-time = "2025-07-26T11:27:26.674Z" }, - { url = "https://files.pythonhosted.org/packages/e1/61/b084ac6ead45d74f1b90d2e3fa30cf54502bf04cbd45d4392b895f98c28d/pydantic_core-2.37.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:21c471185968fab5b4b8848bbe8580b44d52511210547c20c6883b09b19872b7", size = 2320023, upload-time = "2025-07-26T11:27:28.177Z" }, - { url = "https://files.pythonhosted.org/packages/13/5d/4d1e943f76114a2a343c6e398913be5f93c6abdbc4316d7b683b0d8c6011/pydantic_core-2.37.2-cp311-cp311-win32.whl", hash = "sha256:d56baaf071198f8ae9a9fc426021e14cd7324650add0af9265669ccb51e6ca39", size = 1967351, upload-time = "2025-07-26T11:27:29.775Z" }, - { url = "https://files.pythonhosted.org/packages/01/40/7103a2919f57833778692ee5895e5964882eb887a9950334936590652bca/pydantic_core-2.37.2-cp311-cp311-win_amd64.whl", hash = "sha256:606fff9bae16d56a45ea02a7d19ce0756321561febfebe72843bfc8e102dbac7", size = 1994412, upload-time = "2025-07-26T11:27:31.299Z" }, - { url = "https://files.pythonhosted.org/packages/e1/cb/d7284e1affaa422dc0c505f817eee60bceb1cbe4248b73d1498c9fbf9b4c/pydantic_core-2.37.2-cp311-cp311-win_arm64.whl", hash = "sha256:922970c9cf4c5f744aacd992b0ac03ae3ade7dafde3af4ea81cbf617eefb557c", size = 1965525, upload-time = "2025-07-26T11:27:32.813Z" }, - { url = "https://files.pythonhosted.org/packages/a1/c2/f2c4976b36ea1bdc051e33bca8cf230408558cf47c6bd060d418ba222cc4/pydantic_core-2.37.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:35c67588818c8c2bd6b3d75bc7fe49bd3f4ab9c62642d5e7bc4b0af6a5aae923", size = 2088688, upload-time = "2025-07-26T11:27:34.404Z" }, - { url = "https://files.pythonhosted.org/packages/ad/90/8a0c4566ef77bc2fdf08834f085a0f3d419dfe3f811d8333bf4ab021a1e7/pydantic_core-2.37.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:379d5cc78f27d1f3620d09926844f50526a41d92b7e95df73320d47668d2f1fb", size = 1876770, upload-time = 
"2025-07-26T11:27:36.217Z" }, - { url = "https://files.pythonhosted.org/packages/cd/53/951c17547b03fd631c28f474482dbf0fea67bfc611c9feb3234cfb5b07d6/pydantic_core-2.37.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33025a1e442e301eff4d446625e2ca7897634aa7aeddcd38a34c9d4437cb035b", size = 1932502, upload-time = "2025-07-26T11:27:37.701Z" }, - { url = "https://files.pythonhosted.org/packages/e1/b4/a1b1ccddb9f075e71d19c25d9cf4b535fa8dfa41cc0cabafb340e275cefe/pydantic_core-2.37.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e842b55174d3dea826e4491b47d0b23c219c02c13f3c5f5f03adb7797fc5baad", size = 2022499, upload-time = "2025-07-26T11:27:39.523Z" }, - { url = "https://files.pythonhosted.org/packages/d1/d3/1c516083c71a4f9416147f02899f516c4b16af18289a49145c71d5ac83bb/pydantic_core-2.37.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8e13418866bf058af9c7df85174cef589d7cf5d396307695e0d9c6f3c77c1db", size = 2189899, upload-time = "2025-07-26T11:27:41.156Z" }, - { url = "https://files.pythonhosted.org/packages/65/cb/de27dc5daa324e1a9b73a29ae37edaaa1e4b70608c511806be2a22a026b8/pydantic_core-2.37.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64e1fb84f8d1892d2b32f370c1eb450902f97da0042f1638749313198453e5bc", size = 2304399, upload-time = "2025-07-26T11:27:43.05Z" }, - { url = "https://files.pythonhosted.org/packages/3a/1a/591b46a452589fe2525584ccf401cea00532b8bffd20640ebebbe71d75d1/pydantic_core-2.37.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1144cc50a3e22e44b72445ed9d946021d91e11919feadc2dfbc183384969293b", size = 2041091, upload-time = "2025-07-26T11:27:44.566Z" }, - { url = "https://files.pythonhosted.org/packages/f7/c8/7ff04f64262a710ab0cc765deeb4bfe65c4f4b89e1728cc672bdefede7bb/pydantic_core-2.37.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bc376b15d085cf8b42b3e506e4dc15886e358a71fab6fd78d4e260a1c3d8020", size = 2151726, upload-time = "2025-07-26T11:27:46.25Z" }, - { url = "https://files.pythonhosted.org/packages/6e/dc/6b75b0f7590ff425490b1d2e927d2fbec234eabe1f6bd6572d71ff9e974d/pydantic_core-2.37.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:69e85daeb48996505dbe92f94ecefa8db6ac6d7eccbd35d0e43db0c50c7e8356", size = 2113893, upload-time = "2025-07-26T11:27:48.186Z" }, - { url = "https://files.pythonhosted.org/packages/9c/da/c443fb8ba4618e518c85797a0c3e70f39c389c2172ffd256383a62ae86e2/pydantic_core-2.37.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:20386a94fdc0d83fc6b21f6879e88a9fa565763fff00f09cb93244387aeafaf4", size = 2297947, upload-time = "2025-07-26T11:27:49.72Z" }, - { url = "https://files.pythonhosted.org/packages/e9/32/ea2fa758c8b97d3beda11ee02ac84fcae8f0bc6e6d743581672f372e6d0b/pydantic_core-2.37.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ffc850e6eded246e486d17685f9bfb397477032ddb30ba7752a59f6b3f86c943", size = 2300476, upload-time = "2025-07-26T11:27:51.437Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ea/a5a66dc952eee266fa35ab210e521b16ac7cf11219ae08919bcb107b0742/pydantic_core-2.37.2-cp312-cp312-win32.whl", hash = "sha256:c5f13e4bb50a3991731577ce9a0c30750a489c1af0bdcf151798c238c5598108", size = 1947760, upload-time = "2025-07-26T11:27:53.046Z" }, - { url = "https://files.pythonhosted.org/packages/96/34/33bc6b4c57e3ce55169da481d72c08c76b7ec38369d5274f154dd61b94fa/pydantic_core-2.37.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:9e4ca3155f9b8382b0eeea5f0464cf456f316088177bde5691414230091aa810", size = 2004701, upload-time = "2025-07-26T11:27:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/1d/e7/b5760ccc11cb8778f64ce09ea06f9f79e562a460553f75b4a2e116c63946/pydantic_core-2.37.2-cp312-cp312-win_arm64.whl", hash = "sha256:ca4c5b48e83d1e718eb3d6f25b37f060526e75d5178e1ea79ad3dc183e2372ec", size = 1947616, upload-time = "2025-07-26T11:27:56.561Z" }, - { url = "https://files.pythonhosted.org/packages/b0/99/f85f974b76d9e7f226faca45552b285f1c6e22634187cccb31eeab384711/pydantic_core-2.37.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:544f6430e2fb3e332b9fef44c7709564876e8a56bfe27b7d09e41eae8348b804", size = 2093409, upload-time = "2025-07-26T11:27:58.146Z" }, - { url = "https://files.pythonhosted.org/packages/ff/97/c42cd9235aa2f3d7f1908c17d0913e4b577c0e6a3bb9a4287362903a6271/pydantic_core-2.37.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:33f67465de77043f5382d8a5237b68ca36327ebd8ab726609c66a0e8a383a13f", size = 1876181, upload-time = "2025-07-26T11:27:59.722Z" }, - { url = "https://files.pythonhosted.org/packages/64/ba/d7558ef46460544e6383ad224a1351edde6ec2113df0310e4dd25068a0dc/pydantic_core-2.37.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a686a713976796772f71117249de2bdd55edb0295964426ef2ceaa77c99b711", size = 1937685, upload-time = "2025-07-26T11:28:01.356Z" }, - { url = "https://files.pythonhosted.org/packages/5e/66/d4e5ba678659b6fabc471ce8d4dab61a39b2a0aa70b29c5daed77c47ed74/pydantic_core-2.37.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebef7afcbb10a1269c6df9571f6a3cdba29d6415e2432a2dec03e6d782d8344e", size = 2031864, upload-time = "2025-07-26T11:28:03.076Z" }, - { url = "https://files.pythonhosted.org/packages/a6/dc/80ed1b744a5e67220822c75ace93109d3cc1c70fa3f6d3a10c6c681ecdc4/pydantic_core-2.37.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0dc5d2620b80126de92da3c2ed8503b9daba5edc8eea3c1823d6fcb188589345", size = 2194693, upload-time = "2025-07-26T11:28:04.734Z" }, - { url = "https://files.pythonhosted.org/packages/2f/3f/7a7669f15837d34b392f87554b120e23d610039894edc137f56db7df3a2e/pydantic_core-2.37.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f80059c7e07d14000a1c59c31e304b0832448cf4cc906f9386cb19047abb681", size = 2303209, upload-time = "2025-07-26T11:28:06.638Z" }, - { url = "https://files.pythonhosted.org/packages/6b/96/0fa9cff2a5a64dfe7680fa47d411eab5eb2965ec81ecdb6584fc9fcee988/pydantic_core-2.37.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a012d9a45382b256500400f36b8cdc8ec96f6d2913b5f8ee8f767b1b092ce8bd", size = 2042003, upload-time = "2025-07-26T11:28:08.181Z" }, - { url = "https://files.pythonhosted.org/packages/ab/a5/0c553a30312b374a371435ba063332e0380bfd7bbc8a8a2279c8ebc483cf/pydantic_core-2.37.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4828a9a63168ade035052c5e5d8f2a5f8ed39b3a8d085bd3c0f0577bb2aab5", size = 2157534, upload-time = "2025-07-26T11:28:09.926Z" }, - { url = "https://files.pythonhosted.org/packages/12/c5/6cb6621a1cb36e726848b4f164902450f52bcf8ccdb08199ab7b7d4d7c30/pydantic_core-2.37.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b0f69cfbe6ab42944d84bea28c9f3a58f63fc1aef728ed176e4319ad36623dd", size = 2117283, upload-time = "2025-07-26T11:28:11.998Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/30/300d04f8c0a4c8e7454ea3aed25ab5791ec6bafdcab311409fed979956b7/pydantic_core-2.37.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:3f2f004f0ad41d6fd26b59a41d24c5c01d5f98ccabde338d4a35e76a864dfaf4", size = 2306110, upload-time = "2025-07-26T11:28:13.716Z" }, - { url = "https://files.pythonhosted.org/packages/38/2a/d5fe85e8f99bbd56d8865639aff06b71f1559534572f667fcf719c8a8b94/pydantic_core-2.37.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1e90e7495d50e6063bd7c1cd669a01697ff671719781d095ad1d2966a26b0218", size = 2308043, upload-time = "2025-07-26T11:28:15.518Z" }, - { url = "https://files.pythonhosted.org/packages/48/4b/864da5cee748f83de5cce65d649178856e36c86d8e2245705ced8bc95374/pydantic_core-2.37.2-cp313-cp313-win32.whl", hash = "sha256:c7f7fdef12127c680a892cbad70429a6103fa2eb476897b5f5cca2c25dd50d7e", size = 1955505, upload-time = "2025-07-26T11:28:17.551Z" }, - { url = "https://files.pythonhosted.org/packages/40/33/ea811972701d768af9a0bebbe45db196dc2826ae343b5093832950cb5875/pydantic_core-2.37.2-cp313-cp313-win_amd64.whl", hash = "sha256:0433ad8291d1a0c84dfafc993483edb12280a5ba39c547ab965b4c1f2b78f05e", size = 2004808, upload-time = "2025-07-26T11:28:19.766Z" }, - { url = "https://files.pythonhosted.org/packages/45/15/7e7d6581ef9cb72e8b9a5e0f5940838c0c1f98a26d98767f693582f570ea/pydantic_core-2.37.2-cp313-cp313-win_arm64.whl", hash = "sha256:aa033221b49abe4e91431249ef52c2dfa31f13f3187a1e581586a4cb04567f6e", size = 1953700, upload-time = "2025-07-26T11:28:21.345Z" }, - { url = "https://files.pythonhosted.org/packages/4f/cd/4b4d305cdb334ef77836e6f6898e6426d91a51b27268e03c14f63e09849f/pydantic_core-2.37.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a909d7a8eab6a40801e39651bb4912ae39f655c131928b2c2a56e17216637475", size = 1829454, upload-time = "2025-07-26T11:28:22.998Z" }, - { url = "https://files.pythonhosted.org/packages/6a/7c/09cc9b8fb9fc5bc5cf7e8d8f712acc25d7533ade9f759aa5f70d49e8be42/pydantic_core-2.37.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a945ef942eec1d28a4b67c7a72686eb0d9bc60f6b1d06d3bafcd376c7bffbc80", size = 2007536, upload-time = "2025-07-26T11:28:24.756Z" }, - { url = "https://files.pythonhosted.org/packages/f3/36/4315c3d05fdb34aff9647bb0ca8054a43b620f9d881ad7a644312456763c/pydantic_core-2.37.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2c394c7fc57bbbab24a7c2ff2109ad6a399413f843c418ff0523c7fdd321c09", size = 1961022, upload-time = "2025-07-26T11:28:26.477Z" }, - { url = "https://files.pythonhosted.org/packages/de/19/07e73aba49c1cba4c9131a09f64e9bb419b5ee3e63b60322ea28ac586111/pydantic_core-2.37.2-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:dd2d989d27e15494677cd55512987bde711622e86ede2058444aa8346a15c648", size = 2094344, upload-time = "2025-07-26T11:28:28.215Z" }, - { url = "https://files.pythonhosted.org/packages/97/15/9b7a4d366647e9d27c35fb018e9310a71545f2e1e776573ee77afd00fca6/pydantic_core-2.37.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3bf27ce2fc6345a7c073786fcc5b68b667c7b4566c774fef9a61bf3611e67d6b", size = 1870386, upload-time = "2025-07-26T11:28:29.891Z" }, - { url = "https://files.pythonhosted.org/packages/3f/15/bfdbcb091b33c0b31563c202e2b4f4376110c56e41e2a4b55ee96a60c82a/pydantic_core-2.37.2-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d76e1db764c7756281fe17d06d1825610fe8ab3aa4acf1a592e1fb1925a79642", size = 1936224, upload-time = "2025-07-26T11:28:31.686Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/b8/994c0dff145213e29d59c8c6013984c07abaaa806f609dd9bf6f57267f23/pydantic_core-2.37.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ce27109f89e70c6dba047862c07ef326f4a7243f54dd42f5d59cc3dca5e8631f", size = 2033654, upload-time = "2025-07-26T11:28:35.296Z" }, - { url = "https://files.pythonhosted.org/packages/16/69/661ae29dc7a298431f5ddff5b3291124f92b079d6b1801859058ef9e0df5/pydantic_core-2.37.2-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:182a42835e5ad1ac3970785df9dbe25c8418aba03042679b4657bbd992fbfb39", size = 2195866, upload-time = "2025-07-26T11:28:37.086Z" }, - { url = "https://files.pythonhosted.org/packages/2c/59/9904f61abc8c4883303e5164de3c2031ed7d5c5492fe89630306669bea15/pydantic_core-2.37.2-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e51a7b13074fa00d97e5cb60344a71b0923771ffacb01acb0476138b22597e33", size = 2306381, upload-time = "2025-07-26T11:28:38.897Z" }, - { url = "https://files.pythonhosted.org/packages/09/36/7be5601b4933643553fe394f237bbd9f5604cc825b9dab383438d12c97e9/pydantic_core-2.37.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb879dab63b34480a1d28ec54ccbe0f62776f530ae9ad06bf0769bf889662146", size = 2036809, upload-time = "2025-07-26T11:28:40.608Z" }, - { url = "https://files.pythonhosted.org/packages/14/96/a32e9cfddc449e88bed43192f52715aaa88fe04571d7d355083cbae9aea7/pydantic_core-2.37.2-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b3f68bf55d4e5c7daee7dcc455534af4a95ff5ddff1181f1fcb66278e5ef0e7", size = 2160913, upload-time = "2025-07-26T11:28:43.04Z" }, - { url = "https://files.pythonhosted.org/packages/a9/8c/5fec1161b604b0861cea7c1c158e4fc227977a7dd12a10614e0f96930134/pydantic_core-2.37.2-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:777533bf33f38de0fa95141051faf092378c83e61762636c8d4df9051ebbbded", size = 2116794, upload-time = "2025-07-26T11:28:45.28Z" }, - { url = "https://files.pythonhosted.org/packages/9c/7f/66f4815884845bd8ffb37af25e88ffdae1f6a7ce840dcdb5de2933fb66f7/pydantic_core-2.37.2-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:e935a616dc13c07dcfbe2e57fd2898216a2c8b8c331a4c2be6ac5833c811b6f5", size = 2307258, upload-time = "2025-07-26T11:28:47.105Z" }, - { url = "https://files.pythonhosted.org/packages/cb/2f/8f9eb491985d4959fc1a4def1d5081eb3fdf28f449041918fa35f3afa825/pydantic_core-2.37.2-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:88b1f419faee6d031b595abaa7032399a666672d06636b05e14824452eabfcbd", size = 2308490, upload-time = "2025-07-26T11:28:49.386Z" }, - { url = "https://files.pythonhosted.org/packages/a9/51/69c9e1cbb0a6a6fd79a0d6acad6c3b7ec4778d555d12d73e871a9fa123f4/pydantic_core-2.37.2-cp314-cp314-win32.whl", hash = "sha256:ef57747b2a7df50598d843bf978b245e7d86199eb6ac1cf118151d7015789b31", size = 1959064, upload-time = "2025-07-26T11:28:51.442Z" }, - { url = "https://files.pythonhosted.org/packages/0d/1f/0557c25049631e02f72d7cffa1b71dd49b311e3e18fe9db7267d0e2ba5f3/pydantic_core-2.37.2-cp314-cp314-win_amd64.whl", hash = "sha256:476ca176d01b8fa7fa4adcb7432a4221feb9711675a766618b7fad7878b38f46", size = 1998295, upload-time = "2025-07-26T11:28:53.286Z" }, - { url = "https://files.pythonhosted.org/packages/c8/75/c9bc4797ef55e49272eb1f27cc1d9cd2e1fa7e45d9707d19f29a1fa438e2/pydantic_core-2.37.2-cp314-cp314-win_arm64.whl", hash = "sha256:765519587e15e24535670a3f903ded586bb2ca44ab103d3963f8a8ed6a7c87a2", size = 1952949, upload-time = 
"2025-07-26T11:28:55.124Z" }, - { url = "https://files.pythonhosted.org/packages/1a/28/689fc6fc953d80f65f35ff72e30db81609b26fd15be54cb3577b8f684f58/pydantic_core-2.37.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e16d5f519d85acd5deb7d728f22f0433aa80469b3441de24a72a9a8d2561d0d9", size = 1830246, upload-time = "2025-07-26T11:28:56.983Z" }, - { url = "https://files.pythonhosted.org/packages/47/4e/40c30c2de00db6ab11d24b12c3b6bd8e96b4606a0d819bda381a9ed2c0c3/pydantic_core-2.37.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eda70db8850d2cb0ca0d38a5cc5ebfac366cdd36689f128ec4b9fbabbf68973", size = 2008349, upload-time = "2025-07-26T11:28:58.863Z" }, - { url = "https://files.pythonhosted.org/packages/1f/9c/b7248f688933d0340b28f23d78bbc791a51076b9953ae9f0ada4fa7ddcd4/pydantic_core-2.37.2-cp314-cp314t-win_amd64.whl", hash = "sha256:18a0c379bed0c7d05f1ed4de2a2e8dba9a1a21618e8694b8c27be92e46dd3d90", size = 1962545, upload-time = "2025-07-26T11:29:00.644Z" }, - { url = "https://files.pythonhosted.org/packages/46/04/f771e10d280821f74bbe610874b4b0eda94a8c46b5fb93fd81821f0e98fb/pydantic_core-2.37.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:46d70f87518f5426790eba9b2000015874ca4b01770929b45a04f2f2ba689b56", size = 2112568, upload-time = "2025-07-26T11:29:29.397Z" }, - { url = "https://files.pythonhosted.org/packages/87/6f/f4a40319e2b263a46f6287465372c6ecb94a50fbf819e9ca8e39af828fdb/pydantic_core-2.37.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0cb6779157dbbf29ee9cef44599950cb41bec0673c4af37a5b66ccc933a3a55c", size = 1931604, upload-time = "2025-07-26T11:29:31.189Z" }, - { url = "https://files.pythonhosted.org/packages/68/a2/64ecea2b2d3094c432dcb4c51985b14be53294e5c1f35166b6138d53cf64/pydantic_core-2.37.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0792e9dc1e0e1e77ac5b884cb4c1b7965a436215d8a18a553211d24b47cab22", size = 1966031, upload-time = "2025-07-26T11:29:33.113Z" }, - { url = "https://files.pythonhosted.org/packages/5c/85/866ac71c64adcd6e11e72984164e19fc0cb071f0e0f7d80a3cc93eaf6102/pydantic_core-2.37.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1938c7782563cc435469ce7c0d56c2ddbbfdcdc618f81e9240370834532613c3", size = 2153328, upload-time = "2025-07-26T11:29:35.308Z" }, - { url = "https://files.pythonhosted.org/packages/4e/cc/f508e74f394c7f921584704bfed2ebd750c551ebe86c95b4d2916ed0bb58/pydantic_core-2.37.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd28c154ca44d9dc8cd9e11541ceeab7b68d96b78281a114b7441c779fe2c483", size = 2175566, upload-time = "2025-07-26T11:29:37.167Z" }, - { url = "https://files.pythonhosted.org/packages/94/b3/2df9285fca74c1d2349d9b6d9405450826954c5fc8c39ea86323b4ebaf1c/pydantic_core-2.37.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:005032e79dd34b357d77f6020c7a389a084a9e3953f07b85fa1cfd84f91b4792", size = 2143636, upload-time = "2025-07-26T11:29:39.453Z" }, - { url = "https://files.pythonhosted.org/packages/00/8e/cc2e2ddb578cc9f244968add56cbc992dd0d461b16884c1efbd85d865e97/pydantic_core-2.37.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:794d124d21f7cee72929fac75cd1a110560a9806eee805743e237848f90c54d6", size = 2309589, upload-time = "2025-07-26T11:29:41.763Z" }, - { url = "https://files.pythonhosted.org/packages/e1/d2/987b981cfa4e511646dc54239628cd8629fe7807e73b63d1cb644574ffe0/pydantic_core-2.37.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:ae9c016c39f0111009ef34a5554ea204663310706f727bca40f00c9182fcb220", size = 2322421, upload-time = "2025-07-26T11:29:43.926Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2b/93cfc8492a1a9919def29d1714eaaab3ba53c7141d985dce6cdb28a59ba7/pydantic_core-2.37.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e0481c2fe8e1fa7bc4bd45acc4dd96eee7f99ae0c170603161e43dd3886f0333", size = 2148002, upload-time = "2025-07-26T11:29:45.862Z" }, - { url = "https://files.pythonhosted.org/packages/a2/77/c9dab01bc02b771479c6c22af891daf7788b2af6908a2ad4d91aba80bbec/pydantic_core-2.37.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d2b202c7896403c475628e6a6f4b33761ee5e492c9c87a4ca4142324c90308ee", size = 2113400, upload-time = "2025-07-26T11:29:47.813Z" }, - { url = "https://files.pythonhosted.org/packages/1f/93/547d20bd164c8aa061d836f749198ad3083057bcede2e62385eb60213cfc/pydantic_core-2.37.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d15c6a66204100d9dd363ed192971fab906b7ba9d55efe01deaf421ced66c60f", size = 1932076, upload-time = "2025-07-26T11:29:50.282Z" }, - { url = "https://files.pythonhosted.org/packages/73/a5/6402d07e5c3cb5a07b737606098bab0a3379be1927ce2a7799f7e32310cf/pydantic_core-2.37.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f0f3648bb68a188e15cb99e3d715ba56293d2b8e9984bf3b6d373a9ed5640b", size = 1966050, upload-time = "2025-07-26T11:29:52.775Z" }, - { url = "https://files.pythonhosted.org/packages/f7/65/29621f969c94dbc11e2c20d3043ea4eb1a9303417124c78cd22877f2c471/pydantic_core-2.37.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d030263fcc8520d0fe772caf773e535cc3ff6e4c5d7af2ecb21f1ffee4eed5", size = 2152682, upload-time = "2025-07-26T11:29:55.274Z" }, - { url = "https://files.pythonhosted.org/packages/08/6a/ea938afefa1b59e5ff563c978fbfa4cc888cd287a051165b30e1a5293a3a/pydantic_core-2.37.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ca7e1d981ea3eac5864f42150f73b805ff5645d2dcb9615f9cc716eb468538a", size = 2176492, upload-time = "2025-07-26T11:29:57.199Z" }, - { url = "https://files.pythonhosted.org/packages/bf/1b/30b1ea0deffbbfa0c1681112029d3a58ea364c61117d528e22e925d48a64/pydantic_core-2.37.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3f68fc979de6881467086bd26490afecf47af691004a6a61bcc5f64ca8f8b556", size = 2143589, upload-time = "2025-07-26T11:29:59.62Z" }, - { url = "https://files.pythonhosted.org/packages/0d/09/48dc1d9170265cfdddc2fe3872702d52b371acfb6270910657e824004b40/pydantic_core-2.37.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2ca5511d4c815269496e577af62fe0ed8dc4fafb1f7d6bd280063f174d9c6c10", size = 2310473, upload-time = "2025-07-26T11:30:01.845Z" }, - { url = "https://files.pythonhosted.org/packages/32/09/f4958da406b4f362d8c086984cccb95a02ff8f415acd87ec20e75f48d7d4/pydantic_core-2.37.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:257dfc9a1a7f9a2ab203423ca13c10e52798a26081f9ffc43d1c0b90897443e2", size = 2322311, upload-time = "2025-07-26T11:30:04.188Z" }, - { url = "https://files.pythonhosted.org/packages/22/d2/d77b3fd4a3a4d0cfd0b66b6c32427100c445c1f6aea543cc533071d2c399/pydantic_core-2.37.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1fdc045a4ebb04e2218430c7084dd7d30fce601e939c4a0825677dffdf7838b5", size = 2147379, upload-time = "2025-07-26T11:30:06.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, + { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", 
size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + 
{ url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = "https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, ] [[package]] @@ -3205,7 +3240,7 @@ wheels = [ [[package]] name = "torch" -version = "2.8.0" +version = "2.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -3226,6 +3261,7 @@ dependencies = [ { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "setuptools", marker = "python_full_version >= '3.12'" }, { name = "sympy" }, @@ -3233,26 +3269,34 @@ dependencies = [ { name = "typing-extensions" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/63/28/110f7274254f1b8476c561dada127173f994afa2b1ffc044efb773c15650/torch-2.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:0be92c08b44009d4131d1ff7a8060d10bafdb7ddcb7359ef8d8c5169007ea905", size = 102052793, upload-time = "2025-08-06T14:53:15.852Z" }, - { url = "https://files.pythonhosted.org/packages/70/1c/58da560016f81c339ae14ab16c98153d51c941544ae568da3cb5b1ceb572/torch-2.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:89aa9ee820bb39d4d72b794345cccef106b574508dd17dbec457949678c76011", size = 888025420, upload-time = "2025-08-06T14:54:18.014Z" }, - { url = "https://files.pythonhosted.org/packages/70/87/f69752d0dd4ba8218c390f0438130c166fa264a33b7025adb5014b92192c/torch-2.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8e5bf982e87e2b59d932769938b698858c64cc53753894be25629bdf5cf2f46", size = 241363614, upload-time = "2025-08-06T14:53:31.496Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d6/e6d4c57e61c2b2175d3aafbfb779926a2cfd7c32eeda7c543925dceec923/torch-2.8.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:a3f16a58a9a800f589b26d47ee15aca3acf065546137fc2af039876135f4c760", size = 73611154, upload-time = "2025-08-06T14:53:10.919Z" }, - { url = "https://files.pythonhosted.org/packages/8f/c4/3e7a3887eba14e815e614db70b3b529112d1513d9dae6f4d43e373360b7f/torch-2.8.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:220a06fd7af8b653c35d359dfe1aaf32f65aa85befa342629f716acb134b9710", size = 102073391, upload-time = "2025-08-06T14:53:20.937Z" }, - { url = "https://files.pythonhosted.org/packages/5a/63/4fdc45a0304536e75a5e1b1bbfb1b56dd0e2743c48ee83ca729f7ce44162/torch-2.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c12fa219f51a933d5f80eeb3a7a5d0cbe9168c0a14bbb4055f1979431660879b", size = 888063640, upload-time = "2025-08-06T14:55:05.325Z" }, - { url = "https://files.pythonhosted.org/packages/84/57/2f64161769610cf6b1c5ed782bd8a780e18a3c9d48931319f2887fa9d0b1/torch-2.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c7ef765e27551b2fbfc0f41bcf270e1292d9bf79f8e0724848b1682be6e80aa", size = 241366752, upload-time = "2025-08-06T14:53:38.692Z" }, - { url = "https://files.pythonhosted.org/packages/a4/5e/05a5c46085d9b97e928f3f037081d3d2b87fb4b4195030fc099aaec5effc/torch-2.8.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:5ae0524688fb6707c57a530c2325e13bb0090b745ba7b4a2cd6a3ce262572916", size = 73621174, upload-time = "2025-08-06T14:53:25.44Z" }, - { url = "https://files.pythonhosted.org/packages/49/0c/2fd4df0d83a495bb5e54dca4474c4ec5f9c62db185421563deeb5dabf609/torch-2.8.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e2fab4153768d433f8ed9279c8133a114a034a61e77a3a104dcdf54388838705", size = 101906089, upload-time = "2025-08-06T14:53:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/99/a8/6acf48d48838fb8fe480597d98a0668c2beb02ee4755cc136de92a0a956f/torch-2.8.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2aca0939fb7e4d842561febbd4ffda67a8e958ff725c1c27e244e85e982173c", size = 887913624, upload-time = "2025-08-06T14:56:44.33Z" }, - { url = "https://files.pythonhosted.org/packages/af/8a/5c87f08e3abd825c7dfecef5a0f1d9aa5df5dd0e3fd1fa2f490a8e512402/torch-2.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:2f4ac52f0130275d7517b03a33d2493bab3693c83dcfadf4f81688ea82147d2e", size = 241326087, upload-time = "2025-08-06T14:53:46.503Z" }, - { url = "https://files.pythonhosted.org/packages/be/66/5c9a321b325aaecb92d4d1855421e3a055abd77903b7dab6575ca07796db/torch-2.8.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:619c2869db3ada2c0105487ba21b5008defcc472d23f8b80ed91ac4a380283b0", size = 73630478, upload-time = "2025-08-06T14:53:57.144Z" }, - { url = "https://files.pythonhosted.org/packages/10/4e/469ced5a0603245d6a19a556e9053300033f9c5baccf43a3d25ba73e189e/torch-2.8.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2b2f96814e0345f5a5aed9bf9734efa913678ed19caf6dc2cddb7930672d6128", size = 101936856, upload-time = "2025-08-06T14:54:01.526Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/82/3948e54c01b2109238357c6f86242e6ecbf0c63a1af46906772902f82057/torch-2.8.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:65616ca8ec6f43245e1f5f296603e33923f4c30f93d65e103d9e50c25b35150b", size = 887922844, upload-time = "2025-08-06T14:55:50.78Z" }, - { url = "https://files.pythonhosted.org/packages/e3/54/941ea0a860f2717d86a811adf0c2cd01b3983bdd460d0803053c4e0b8649/torch-2.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:659df54119ae03e83a800addc125856effda88b016dfc54d9f65215c3975be16", size = 241330968, upload-time = "2025-08-06T14:54:45.293Z" }, - { url = "https://files.pythonhosted.org/packages/de/69/8b7b13bba430f5e21d77708b616f767683629fc4f8037564a177d20f90ed/torch-2.8.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:1a62a1ec4b0498930e2543535cf70b1bef8c777713de7ceb84cd79115f553767", size = 73915128, upload-time = "2025-08-06T14:54:34.769Z" }, - { url = "https://files.pythonhosted.org/packages/15/0e/8a800e093b7f7430dbaefa80075aee9158ec22e4c4fc3c1a66e4fb96cb4f/torch-2.8.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:83c13411a26fac3d101fe8035a6b0476ae606deb8688e904e796a3534c197def", size = 102020139, upload-time = "2025-08-06T14:54:39.047Z" }, - { url = "https://files.pythonhosted.org/packages/4a/15/5e488ca0bc6162c86a33b58642bc577c84ded17c7b72d97e49b5833e2d73/torch-2.8.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8f0a9d617a66509ded240add3754e462430a6c1fc5589f86c17b433dd808f97a", size = 887990692, upload-time = "2025-08-06T14:56:18.286Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a8/6a04e4b54472fc5dba7ca2341ab219e529f3c07b6941059fbf18dccac31f/torch-2.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a7242b86f42be98ac674b88a4988643b9bc6145437ec8f048fea23f72feb5eca", size = 241603453, upload-time = "2025-08-06T14:55:22.945Z" }, - { url = "https://files.pythonhosted.org/packages/04/6e/650bb7f28f771af0cb791b02348db8b7f5f64f40f6829ee82aa6ce99aabe/torch-2.8.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:7b677e17f5a3e69fdef7eb3b9da72622f8d322692930297e4ccb52fefc6c8211", size = 73632395, upload-time = "2025-08-06T14:55:28.645Z" }, + { url = "https://files.pythonhosted.org/packages/bb/86/245c240d2138c17ed572c943c289056c2721abab70810d772c6bf5495b28/torch-2.9.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:030bbfe367379ae6a4ae4042b6c44da25383343b8b3c68abaa9c7231efbaf2dd", size = 104213554, upload-time = "2025-10-15T15:45:59.798Z" }, + { url = "https://files.pythonhosted.org/packages/58/1d/fd1e88ae0948825efcab7dd66d12bec23f05d4d38ed81573c8d453c14c06/torch-2.9.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:51cb63902182a78e90886e8068befd8ea102af4b00e420263591a3d70c7d3c6c", size = 899795167, upload-time = "2025-10-15T15:47:12.695Z" }, + { url = "https://files.pythonhosted.org/packages/63/5a/496197b45c14982bef4e079b24c61dc108e3ab0d0cc9718dba9f54f45a46/torch-2.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:3f6aad4d2f0ee2248bac25339d74858ff846c3969b27d14ac235821f055af83d", size = 109310314, upload-time = "2025-10-15T15:46:16.633Z" }, + { url = "https://files.pythonhosted.org/packages/58/b0/2b4e647b0fc706e88eb6c253d05511865578f5f67b55fad639bf3272a4a1/torch-2.9.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:413e1654c9203733138858780e184d9fc59442f0b3b209e16f39354eb893db9b", size = 74452019, upload-time = "2025-10-15T15:46:04.296Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/fe/334225e6330e672b36aef23d77451fa906ea12881570c08638a91331a212/torch-2.9.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c596708b5105d0b199215acf0c9be7c1db5f1680d88eddadf4b75a299259a677", size = 104230578, upload-time = "2025-10-15T15:46:08.182Z" }, + { url = "https://files.pythonhosted.org/packages/05/cc/49566caaa218872ec9a2912456f470ff92649894a4bc2e5274aa9ef87c4a/torch-2.9.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:51de31219c97c51cf4bf2be94d622e3deb5dcc526c6dc00e97c17eaec0fc1d67", size = 899815990, upload-time = "2025-10-15T15:48:03.336Z" }, + { url = "https://files.pythonhosted.org/packages/74/25/e9ab21d5925b642d008f139d4a3c9664fc9ee1faafca22913c080cc4c0a5/torch-2.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd515c70059afd95f48b8192733764c08ca37a1d19803af6401b5ecad7c8676e", size = 109313698, upload-time = "2025-10-15T15:46:12.425Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b7/205ef3e94de636feffd64b28bb59a0dfac0771221201b9871acf9236f5ca/torch-2.9.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:614a185e4986326d526a91210c8fc1397e76e8cfafa78baf6296a790e53a9eec", size = 74463678, upload-time = "2025-10-15T15:46:29.779Z" }, + { url = "https://files.pythonhosted.org/packages/d1/d3/3985739f3b8e88675127bf70f82b3a48ae083e39cda56305dbd90398fec0/torch-2.9.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e5f7af1dc4c0a7c4a260c2534f41ddaf209714f7c89145e644c44712fbd6b642", size = 104107898, upload-time = "2025-10-15T15:46:20.883Z" }, + { url = "https://files.pythonhosted.org/packages/a5/4b/f4bb2e6c25d0272f798cd6d7a04ed315da76cec68c602d87040c7847287f/torch-2.9.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:01cff95ecd9a212ea2f141db28acccdceb6a4c54f64e6c51091146f5e2a772c6", size = 899738273, upload-time = "2025-10-15T15:50:04.188Z" }, + { url = "https://files.pythonhosted.org/packages/66/11/c1c5ba6691cda6279087c35bd626536e4fd29521fe740abf5008377a9a02/torch-2.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:4582b162f541651f0cb184d3e291c05c2f556c7117c64a9873e2ee158d40062b", size = 109280887, upload-time = "2025-10-15T15:46:26.228Z" }, + { url = "https://files.pythonhosted.org/packages/dd/5f/b85bd8c05312d71de9402bf5868d217c38827cfd09d8f8514e5be128a52b/torch-2.9.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:33f58e9a102a91259af289d50525c30323b5c9ae1d31322b6447c0814da68695", size = 74478983, upload-time = "2025-10-15T15:46:39.406Z" }, + { url = "https://files.pythonhosted.org/packages/c2/1c/90eb13833cdf4969ea9707586d7b57095c3b6e2b223a7256bf111689bcb8/torch-2.9.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c30a17fc83eeab346913e237c64b15b5ba6407fff812f6c541e322e19bc9ea0e", size = 104111330, upload-time = "2025-10-15T15:46:35.238Z" }, + { url = "https://files.pythonhosted.org/packages/0e/21/2254c54b8d523592c25ef4434769aa23e29b1e6bf5f4c0ad9e27bf442927/torch-2.9.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8f25033b8667b57857dfd01458fbf2a9e6a6df1f8def23aef0dc46292f6aa642", size = 899750243, upload-time = "2025-10-15T15:48:57.459Z" }, + { url = "https://files.pythonhosted.org/packages/b7/a5/5cb94fa4fd1e78223455c23c200f30f6dc10c6d4a2bcc8f6e7f2a2588370/torch-2.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:d037f1b4ffd25013be4a7bf3651a0a910c68554956c7b2c92ebe87c76475dece", size = 109284513, upload-time = "2025-10-15T15:46:45.061Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/e8/fc414d8656250ee46120b44836ffbb3266343db424b3e18ca79ebbf69d4f/torch-2.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e4e5b5cba837a2a8d1a497ba9a58dae46fa392593eaa13b871c42f71847503a5", size = 74830362, upload-time = "2025-10-15T15:46:48.983Z" }, + { url = "https://files.pythonhosted.org/packages/ed/5f/9474c98fc5ae0cd04b9466035428cd360e6611a86b8352a0fc2fa504acdc/torch-2.9.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:64693568f5dc4dbd5f880a478b1cea0201cc6b510d91d1bc54fea86ac5d1a637", size = 104144940, upload-time = "2025-10-15T15:47:29.076Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5a/8e0c1cf57830172c109d4bd6be2708cabeaf550983eee7029291322447a0/torch-2.9.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:f8ed31ddd7d10bfb3fbe0b9fe01b1243577f13d75e6f4a0839a283915ce3791e", size = 899744054, upload-time = "2025-10-15T15:48:29.864Z" }, + { url = "https://files.pythonhosted.org/packages/6d/28/82c28b30fcb4b7c9cdd995763d18bbb830d6521356712faebbad92ffa61d/torch-2.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:eff527d4e4846e6f70d2afd8058b73825761203d66576a7e04ea2ecfebcb4ab8", size = 109517546, upload-time = "2025-10-15T15:47:33.395Z" }, + { url = "https://files.pythonhosted.org/packages/ff/c3/a91f96ec74347fa5fd24453fa514bc61c61ecc79196fa760b012a1873d96/torch-2.9.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:f8877779cf56d1ce431a7636703bdb13307f5960bb1af49716d8b179225e0e6a", size = 74480732, upload-time = "2025-10-15T15:47:38.002Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/9f70af34b334a7e0ef496ceec96b7ec767bd778ea35385ce6f77557534d1/torch-2.9.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7e614fae699838038d888729f82b687c03413c5989ce2a9481f9a7e7a396e0bb", size = 74433037, upload-time = "2025-10-15T15:47:41.894Z" }, + { url = "https://files.pythonhosted.org/packages/b7/84/37cf88625901934c97109e583ecc21777d21c6f54cda97a7e5bbad1ee2f2/torch-2.9.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:dfb5b8cd310ba3436c7e14e8b7833ef658cf3045e50d2bdaed23c8fc517065eb", size = 104116482, upload-time = "2025-10-15T15:47:46.266Z" }, + { url = "https://files.pythonhosted.org/packages/56/8e/ca8b17866943a8d4f4664d402ea84210aa274588b4c5d89918f5caa24eec/torch-2.9.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:b3d29524993a478e46f5d598b249cd824b7ed98d7fba538bd9c4cde6c803948f", size = 899746916, upload-time = "2025-10-15T15:50:40.294Z" }, + { url = "https://files.pythonhosted.org/packages/43/65/3b17c0fbbdab6501c5b320a52a648628d0d44e7379f64e27d9eef701b6bf/torch-2.9.0-cp314-cp314-win_amd64.whl", hash = "sha256:71c7578984f5ec0eb645eb4816ac8435fcf3e3e2ae1901bcd2f519a9cafb5125", size = 109275151, upload-time = "2025-10-15T15:49:20.715Z" }, + { url = "https://files.pythonhosted.org/packages/83/36/74f8c051f785500396e42f93542422422dfd874a174f21f8d955d36e5d64/torch-2.9.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:71d9309aee457bbe0b164bce2111cd911c4ed4e847e65d5077dbbcd3aba6befc", size = 74823353, upload-time = "2025-10-15T15:49:16.59Z" }, + { url = "https://files.pythonhosted.org/packages/62/51/dc3b4e2f9ba98ae27238f0153ca098bf9340b2dafcc67fde645d496dfc2a/torch-2.9.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c08fb654d783899e204a32cca758a7ce8a45b2d78eeb89517cc937088316f78e", size = 104140340, upload-time = "2025-10-15T15:50:19.67Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/8d/b00657f8141ac16af7bb6cda2e67de18499a3263b78d516b9a93fcbc98e3/torch-2.9.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ec8feb0099b2daa5728fbc7abb0b05730fd97e0f359ff8bda09865aaa7bd7d4b", size = 899731750, upload-time = "2025-10-15T15:49:36.673Z" }, + { url = "https://files.pythonhosted.org/packages/fc/29/bd361e0cbb2c79ce6450f42643aaf6919956f89923a50571b0ebfe92d142/torch-2.9.0-cp314-cp314t-win_amd64.whl", hash = "sha256:695ba920f234ad4170c9c50e28d56c848432f8f530e6bc7f88fcb15ddf338e75", size = 109503850, upload-time = "2025-10-15T15:50:24.118Z" }, ] [[package]] @@ -3319,17 +3363,16 @@ wheels = [ [[package]] name = "triton" -version = "3.4.0" +version = "3.5.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "setuptools" }, -] wheels = [ - { url = "https://files.pythonhosted.org/packages/62/ee/0ee5f64a87eeda19bbad9bc54ae5ca5b98186ed00055281fd40fb4beb10e/triton-3.4.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ff2785de9bc02f500e085420273bb5cc9c9bb767584a4aa28d6e360cec70128", size = 155430069, upload-time = "2025-07-30T19:58:21.715Z" }, - { url = "https://files.pythonhosted.org/packages/7d/39/43325b3b651d50187e591eefa22e236b2981afcebaefd4f2fc0ea99df191/triton-3.4.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b70f5e6a41e52e48cfc087436c8a28c17ff98db369447bcaff3b887a3ab4467", size = 155531138, upload-time = "2025-07-30T19:58:29.908Z" }, - { url = "https://files.pythonhosted.org/packages/d0/66/b1eb52839f563623d185f0927eb3530ee4d5ffe9d377cdaf5346b306689e/triton-3.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31c1d84a5c0ec2c0f8e8a072d7fd150cab84a9c239eaddc6706c081bfae4eb04", size = 155560068, upload-time = "2025-07-30T19:58:37.081Z" }, - { url = "https://files.pythonhosted.org/packages/30/7b/0a685684ed5322d2af0bddefed7906674f67974aa88b0fae6e82e3b766f6/triton-3.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00be2964616f4c619193cb0d1b29a99bd4b001d7dc333816073f92cf2a8ccdeb", size = 155569223, upload-time = "2025-07-30T19:58:44.017Z" }, - { url = "https://files.pythonhosted.org/packages/20/63/8cb444ad5cdb25d999b7d647abac25af0ee37d292afc009940c05b82dda0/triton-3.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7936b18a3499ed62059414d7df563e6c163c5e16c3773678a3ee3d417865035d", size = 155659780, upload-time = "2025-07-30T19:58:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/0b/eb/09e31d107a5d00eb281aa7e6635ca463e9bca86515944e399480eadb71f8/triton-3.5.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5d3b3d480debf24eaa739623c9a42446b0b77f95593d30eb1f64cd2278cc1f0", size = 170333110, upload-time = "2025-10-13T16:37:49.588Z" }, + { url = "https://files.pythonhosted.org/packages/3d/78/949a04391c21956c816523678f0e5fa308eb5b1e7622d88c4e4ef5fceca0/triton-3.5.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f34bfa21c5b3a203c0f0eab28dcc1e49bd1f67d22724e77fb6665a659200a4ec", size = 170433488, upload-time = "2025-10-13T16:37:57.132Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3a/e991574f3102147b642e49637e0281e9bb7c4ba254edb2bab78247c85e01/triton-3.5.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9e71db82261c4ffa3921cd050cd5faa18322d2d405c30eb56084afaff3b0833", size = 170476535, upload-time = "2025-10-13T16:38:05.18Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/29/10728de8a6e932e517c10773486b8e99f85d1b1d9dd87d9a9616e1fef4a1/triton-3.5.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e6bb9aa5519c084a333acdba443789e50012a4b851cd486c54f0b8dc2a8d3a12", size = 170487289, upload-time = "2025-10-13T16:38:11.662Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/db80e48b9220c9bce872b0f616ad0446cdf554a40b85c7865cbca99ab3c2/triton-3.5.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c83f2343e1a220a716c7b3ab9fccfcbe3ad4020d189549200e2d2e8d5868bed9", size = 170577179, upload-time = "2025-10-13T16:38:17.865Z" }, + { url = "https://files.pythonhosted.org/packages/ff/60/1810655d1d856c9a4fcc90ee8966d85f552d98c53a6589f95ab2cbe27bb8/triton-3.5.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da0fa67ccd76c3dcfb0bffe1b1c57c685136a6bd33d141c24d9655d4185b1289", size = 170487949, upload-time = "2025-10-13T16:38:24.881Z" }, + { url = "https://files.pythonhosted.org/packages/fb/b7/1dec8433ac604c061173d0589d99217fe7bf90a70bdc375e745d044b8aad/triton-3.5.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:317fe477ea8fd4524a6a8c499fb0a36984a56d0b75bf9c9cb6133a1c56d5a6e7", size = 170580176, upload-time = "2025-10-13T16:38:31.14Z" }, ] [[package]] @@ -3356,14 +3399,14 @@ wheels = [ [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] diff --git a/libs/partners/mistralai/README.md b/libs/partners/mistralai/README.md index 2882972c74b..1caf2ca5fcd 100644 --- a/libs/partners/mistralai/README.md +++ b/libs/partners/mistralai/README.md @@ -1,3 +1,18 @@ # langchain-mistralai +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-mistralai?label=%20)](https://pypi.org/project/langchain-mistralai/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-mistralai)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-mistralai)](https://pypistats.org/packages/langchain-mistralai) 
+[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-mistralai +``` + +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/mistralai) for more details. diff --git a/libs/partners/mistralai/langchain_mistralai/_compat.py b/libs/partners/mistralai/langchain_mistralai/_compat.py new file mode 100644 index 00000000000..f716300dccc --- /dev/null +++ b/libs/partners/mistralai/langchain_mistralai/_compat.py @@ -0,0 +1,125 @@ +"""Derivations of standard content blocks from mistral content.""" + +from __future__ import annotations + +from langchain_core.messages import AIMessage, AIMessageChunk +from langchain_core.messages import content as types +from langchain_core.messages.block_translators import register_translator + + +def _convert_from_v1_to_mistral( + content: list[types.ContentBlock], + model_provider: str | None, +) -> str | list[str | dict]: + new_content: list = [] + for block in content: + if block["type"] == "text": + new_content.append({"text": block.get("text", ""), "type": "text"}) + + elif ( + block["type"] == "reasoning" + and (reasoning := block.get("reasoning")) + and isinstance(reasoning, str) + and model_provider == "mistralai" + ): + new_content.append( + { + "type": "thinking", + "thinking": [{"type": "text", "text": reasoning}], + } + ) + + elif ( + block["type"] == "non_standard" + and "value" in block + and model_provider == "mistralai" + ): + new_content.append(block["value"]) + elif block["type"] == "tool_call": + continue + else: + new_content.append(block) + + return new_content + + +def _convert_to_v1_from_mistral(message: AIMessage) -> list[types.ContentBlock]: + """Convert mistral message content to v1 format.""" + if isinstance(message.content, str): + content_blocks: list[types.ContentBlock] = [ + {"type": "text", "text": message.content} + ] + + else: + content_blocks = [] + for block in message.content: + if isinstance(block, str): + content_blocks.append({"type": "text", "text": block}) + + elif isinstance(block, dict): + if block.get("type") == "text" and isinstance(block.get("text"), str): + text_block: types.TextContentBlock = { + "type": "text", + "text": block["text"], + } + if "index" in block: + text_block["index"] = block["index"] + content_blocks.append(text_block) + + elif block.get("type") == "thinking" and isinstance( + block.get("thinking"), list + ): + for sub_block in block["thinking"]: + if ( + isinstance(sub_block, dict) + and sub_block.get("type") == "text" + ): + reasoning_block: types.ReasoningContentBlock = { + "type": "reasoning", + "reasoning": sub_block.get("text", ""), + } + if "index" in block: + reasoning_block["index"] = block["index"] + content_blocks.append(reasoning_block) + + else: + non_standard_block: types.NonStandardContentBlock = { + "type": "non_standard", + "value": block, + } + content_blocks.append(non_standard_block) + else: + continue + + if ( + len(content_blocks) == 1 + and content_blocks[0].get("type") == "text" + and content_blocks[0].get("text") == "" + and message.tool_calls + ): + content_blocks = [] + + for tool_call in message.tool_calls: + content_blocks.append( + { + "type": "tool_call", + "name": tool_call["name"], + "args": tool_call["args"], + "id": tool_call.get("id"), + } + ) + + 
return content_blocks + + +def translate_content(message: AIMessage) -> list[types.ContentBlock]: + """Derive standard content blocks from a message with mistral content.""" + return _convert_to_v1_from_mistral(message) + + +def translate_content_chunk(message: AIMessageChunk) -> list[types.ContentBlock]: + """Derive standard content blocks from a message chunk with mistral content.""" + return _convert_to_v1_from_mistral(message) + + +register_translator("mistralai", translate_content, translate_content_chunk) diff --git a/libs/partners/mistralai/langchain_mistralai/chat_models.py b/libs/partners/mistralai/langchain_mistralai/chat_models.py index 88e999db270..660c38614c2 100644 --- a/libs/partners/mistralai/langchain_mistralai/chat_models.py +++ b/libs/partners/mistralai/langchain_mistralai/chat_models.py @@ -24,12 +24,7 @@ from langchain_core.callbacks import ( CallbackManagerForLLMRun, ) from langchain_core.language_models import LanguageModelInput -from langchain_core.language_models.chat_models import ( - BaseChatModel, - LangSmithParams, - agenerate_from_stream, - generate_from_stream, -) +from langchain_core.language_models.chat_models import BaseChatModel, LangSmithParams from langchain_core.language_models.llms import create_base_retry_decorator from langchain_core.messages import ( AIMessage, @@ -74,6 +69,8 @@ from pydantic import ( ) from typing_extensions import Self +from langchain_mistralai._compat import _convert_from_v1_to_mistral + if TYPE_CHECKING: from collections.abc import AsyncIterator, Iterator, Sequence from contextlib import AbstractAsyncContextManager @@ -160,6 +157,7 @@ def _convert_mistral_chat_message_to_message( additional_kwargs=additional_kwargs, tool_calls=tool_calls, invalid_tool_calls=invalid_tool_calls, + response_metadata={"model_provider": "mistralai"}, ) @@ -231,14 +229,34 @@ async def acompletion_with_retry( def _convert_chunk_to_message_chunk( - chunk: dict, default_class: type[BaseMessageChunk] -) -> BaseMessageChunk: + chunk: dict, + default_class: type[BaseMessageChunk], + index: int, + index_type: str, + output_version: str | None, +) -> tuple[BaseMessageChunk, int, str]: _choice = chunk["choices"][0] _delta = _choice["delta"] role = _delta.get("role") content = _delta.get("content") or "" + if output_version == "v1" and isinstance(content, str): + content = [{"type": "text", "text": content}] + if isinstance(content, list): + for block in content: + if isinstance(block, dict): + if "type" in block and block["type"] != index_type: + index_type = block["type"] + index = index + 1 + if "index" not in block: + block["index"] = index + if block.get("type") == "thinking" and isinstance( + block.get("thinking"), list + ): + for sub_block in block["thinking"]: + if isinstance(sub_block, dict) and "index" not in sub_block: + sub_block["index"] = 0 if role == "user" or default_class == HumanMessageChunk: - return HumanMessageChunk(content=content) + return HumanMessageChunk(content=content), index, index_type if role == "assistant" or default_class == AIMessageChunk: additional_kwargs: dict = {} response_metadata = {} @@ -276,18 +294,22 @@ def _convert_chunk_to_message_chunk( ): response_metadata["model_name"] = chunk["model"] response_metadata["finish_reason"] = _choice["finish_reason"] - return AIMessageChunk( - content=content, - additional_kwargs=additional_kwargs, - tool_call_chunks=tool_call_chunks, # type: ignore[arg-type] - usage_metadata=usage_metadata, # type: ignore[arg-type] - response_metadata=response_metadata, + return ( + AIMessageChunk( 
+ content=content, + additional_kwargs=additional_kwargs, + tool_call_chunks=tool_call_chunks, # type: ignore[arg-type] + usage_metadata=usage_metadata, # type: ignore[arg-type] + response_metadata={"model_provider": "mistralai", **response_metadata}, + ), + index, + index_type, ) if role == "system" or default_class == SystemMessageChunk: - return SystemMessageChunk(content=content) + return SystemMessageChunk(content=content), index, index_type if role or default_class == ChatMessageChunk: - return ChatMessageChunk(content=content, role=role) - return default_class(content=content) # type: ignore[call-arg] + return ChatMessageChunk(content=content, role=role), index, index_type + return default_class(content=content), index, index_type # type: ignore[call-arg] def _format_tool_call_for_mistral(tool_call: ToolCall) -> dict: @@ -318,6 +340,21 @@ def _format_invalid_tool_call_for_mistral(invalid_tool_call: InvalidToolCall) -> return result +def _clean_block(block: dict) -> dict: + # Remove "index" key added for message aggregation in langchain-core + new_block = {k: v for k, v in block.items() if k != "index"} + if block.get("type") == "thinking" and isinstance(block.get("thinking"), list): + new_block["thinking"] = [ + ( + {k: v for k, v in sb.items() if k != "index"} + if isinstance(sb, dict) and "index" in sb + else sb + ) + for sb in block["thinking"] + ] + return new_block + + def _convert_message_to_mistral_chat_message( message: BaseMessage, ) -> dict: @@ -327,16 +364,14 @@ def _convert_message_to_mistral_chat_message( return {"role": "user", "content": message.content} if isinstance(message, AIMessage): message_dict: dict[str, Any] = {"role": "assistant"} - tool_calls = [] + tool_calls: list = [] if message.tool_calls or message.invalid_tool_calls: - for tool_call in message.tool_calls: + if message.tool_calls: tool_calls.extend( - [ - _format_tool_call_for_mistral(tool_call) - for tool_call in message.tool_calls - ] + _format_tool_call_for_mistral(tool_call) + for tool_call in message.tool_calls ) - for invalid_tool_call in message.invalid_tool_calls: + if message.invalid_tool_calls: tool_calls.extend( _format_invalid_tool_call_for_mistral(invalid_tool_call) for invalid_tool_call in message.invalid_tool_calls @@ -356,13 +391,40 @@ def _convert_message_to_mistral_chat_message( pass if tool_calls: # do not populate empty list tool_calls message_dict["tool_calls"] = tool_calls - if tool_calls and message.content: + + # Message content + # Translate v1 content + if message.response_metadata.get("output_version") == "v1": + content = _convert_from_v1_to_mistral( + message.content_blocks, message.response_metadata.get("model_provider") + ) + else: + content = message.content + + if tool_calls and content: # Assistant message must have either content or tool_calls, but not both. # Some providers may not support tool_calls in the same message as content. # This is done to ensure compatibility with messages from other providers. 
- message_dict["content"] = "" + content = "" + + elif isinstance(content, list): + content = [ + _clean_block(block) + if isinstance(block, dict) and "index" in block + else block + for block in content + ] else: - message_dict["content"] = message.content + content = message.content + + # if any blocks are dicts, cast strings to text blocks + if any(isinstance(block, dict) for block in content): + content = [ + block if isinstance(block, dict) else {"type": "text", "text": block} + for block in content + ] + message_dict["content"] = content + if "prefix" in message.additional_kwargs: message_dict["prefix"] = message.additional_kwargs["prefix"] return message_dict @@ -382,7 +444,7 @@ def _convert_message_to_mistral_chat_message( class ChatMistralAI(BaseChatModel): - """A chat model that uses the MistralAI API.""" + """A chat model that uses the Mistral AI API.""" # The type for client and async_client is ignored because the type is not # an Optional after the model is initialized and the model_validator @@ -390,27 +452,41 @@ class ChatMistralAI(BaseChatModel): client: httpx.Client = Field( # type: ignore[assignment] # : meta private: default=None, exclude=True ) + async_client: httpx.AsyncClient = Field( # type: ignore[assignment] # : meta private: default=None, exclude=True - ) #: :meta private: + ) + mistral_api_key: SecretStr | None = Field( alias="api_key", default_factory=secret_from_env("MISTRAL_API_KEY", default=None), ) + endpoint: str | None = Field(default=None, alias="base_url") + max_retries: int = 5 + timeout: int = 120 + max_concurrent_requests: int = 64 + model: str = Field(default="mistral-small", alias="model_name") + temperature: float = 0.7 + max_tokens: int | None = None + top_p: float = 1 """Decode using nucleus sampling: consider the smallest set of tokens whose probability sum is at least `top_p`. 
Must be in the closed interval `[0.0, 1.0]`.""" + random_seed: int | None = None + safe_mode: bool | None = None + streaming: bool = False + model_kwargs: dict[str, Any] = Field(default_factory=dict) """Holds any invocation parameters not explicitly specified.""" @@ -564,13 +640,6 @@ class ChatMistralAI(BaseChatModel): stream: bool | None = None, # noqa: FBT001 **kwargs: Any, ) -> ChatResult: - should_stream = stream if stream is not None else self.streaming - if should_stream: - stream_iter = self._stream( - messages, stop=stop, run_manager=run_manager, **kwargs - ) - return generate_from_stream(stream_iter) - message_dicts, params = self._create_message_dicts(messages, stop) params = {**params, **kwargs} response = self.completion_with_retry( @@ -627,12 +696,16 @@ class ChatMistralAI(BaseChatModel): params = {**params, **kwargs, "stream": True} default_chunk_class: type[BaseMessageChunk] = AIMessageChunk + index = -1 + index_type = "" for chunk in self.completion_with_retry( messages=message_dicts, run_manager=run_manager, **params ): if len(chunk.get("choices", [])) == 0: continue - new_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class) + new_chunk, index, index_type = _convert_chunk_to_message_chunk( + chunk, default_chunk_class, index, index_type, self.output_version + ) # make future chunks same type as first chunk default_chunk_class = new_chunk.__class__ gen_chunk = ChatGenerationChunk(message=new_chunk) @@ -653,12 +726,16 @@ class ChatMistralAI(BaseChatModel): params = {**params, **kwargs, "stream": True} default_chunk_class: type[BaseMessageChunk] = AIMessageChunk + index = -1 + index_type = "" async for chunk in await acompletion_with_retry( self, messages=message_dicts, run_manager=run_manager, **params ): if len(chunk.get("choices", [])) == 0: continue - new_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class) + new_chunk, index, index_type = _convert_chunk_to_message_chunk( + chunk, default_chunk_class, index, index_type, self.output_version + ) # make future chunks same type as first chunk default_chunk_class = new_chunk.__class__ gen_chunk = ChatGenerationChunk(message=new_chunk) @@ -676,13 +753,6 @@ class ChatMistralAI(BaseChatModel): stream: bool | None = None, # noqa: FBT001 **kwargs: Any, ) -> ChatResult: - should_stream = stream if stream is not None else self.streaming - if should_stream: - stream_iter = self._astream( - messages=messages, stop=stop, run_manager=run_manager, **kwargs - ) - return await agenerate_from_stream(stream_iter) - message_dicts, params = self._create_message_dicts(messages, stop) params = {**params, **kwargs} response = await acompletion_with_retry( @@ -747,20 +817,19 @@ class ChatMistralAI(BaseChatModel): Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class (support added in 0.1.12), - - or a Pydantic class. + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. - !!! 
warning "Behavior changed in 0.1.12" - Added support for TypedDict class. + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. method: The method for steering model generation, one of: @@ -777,16 +846,22 @@ class ChatMistralAI(BaseChatModel): must include instructions for formatting the output into the desired schema into the model call. - !!! warning "Behavior changed in 0.2.5" + !!! warning "Behavior changed in `langchain-mistralai` 0.2.5" Added method="json_schema" include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. kwargs: Any additional parameters are passed directly to `self.bind(**kwargs)`. This is useful for passing in @@ -795,184 +870,190 @@ class ChatMistralAI(BaseChatModel): `stop` to control when the model should stop generating output. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - Otherwise, if `include_raw` is False then Runnable outputs a dict. - - If `include_raw` is True, then Runnable outputs a dict with keys: - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` Example: schema=Pydantic class, method="function_calling", include_raw=False: - ```python - from typing import Optional - from langchain_mistralai import ChatMistralAI - from pydantic import BaseModel, Field + ```python + from typing import Optional + + from langchain_mistralai import ChatMistralAI + from pydantic import BaseModel, Field - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - # If we provide default values and/or descriptions for fields, these will be passed - # to the model. 
This is an important part of improving a model's ability to - # correctly return structured outputs. - justification: str | None = Field( - default=None, description="A justification for the answer." - ) - - - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) - - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" + answer: str + # If we provide default values and/or descriptions for fields, these will be passed + # to the model. This is an important part of improving a model's ability to + # correctly return structured outputs. + justification: str | None = Field( + default=None, description="A justification for the answer." ) - # -> AnswerWithJustification( - # answer='They weigh the same', - # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' - # ) - ``` + + model = ChatMistralAI(model="mistral-large-latest", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) + + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + + # -> AnswerWithJustification( + # answer='They weigh the same', + # justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.' + # ) + ``` Example: schema=Pydantic class, method="function_calling", include_raw=True: - ```python - from langchain_mistralai import ChatMistralAI - from pydantic import BaseModel + + ```python + from langchain_mistralai import ChatMistralAI + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(BaseModel): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: str + answer: str + justification: str - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) - structured_llm = llm.with_structured_output( - AnswerWithJustification, include_raw=True - ) + model = ChatMistralAI(model="mistral-large-latest", temperature=0) + structured_model = model.with_structured_output( + AnswerWithJustification, include_raw=True + ) - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), - # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + # -> { + # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. 
The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), + # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), + # 'parsing_error': None + # } + ``` Example: schema=TypedDict class, method="function_calling", include_raw=False: - ```python - from typing_extensions import Annotated, TypedDict - from langchain_mistralai import ChatMistralAI + ```python + from typing_extensions import Annotated, TypedDict + + from langchain_mistralai import ChatMistralAI - class AnswerWithJustification(TypedDict): - '''An answer to the user question along with justification for the answer.''' + class AnswerWithJustification(TypedDict): + '''An answer to the user question along with justification for the answer.''' - answer: str - justification: Annotated[ - str | None, None, "A justification for the answer." - ] + answer: str + justification: Annotated[ + str | None, None, "A justification for the answer." + ] - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatMistralAI(model="mistral-large-latest", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke( + structured_model.invoke( + "What weighs more a pound of bricks or a pound of feathers" + ) + # -> { + # 'answer': 'They weigh the same', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' + # } + ``` + + Example: schema=OpenAI function schema, method="function_calling", include_raw=False: + + ```python + from langchain_mistralai import ChatMistralAI + + oai_schema = { + 'name': 'AnswerWithJustification', + 'description': 'An answer to the user question along with justification for the answer.', + 'parameters': { + 'type': 'object', + 'properties': { + 'answer': {'type': 'string'}, + 'justification': {'description': 'A justification for the answer.', 'type': 'string'} + }, + 'required': ['answer'] + } + + model = ChatMistralAI(model="mistral-large-latest", temperature=0) + structured_model = model.with_structured_output(oai_schema) + + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) # -> { # 'answer': 'They weigh the same', # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' 
# } - ``` - - Example: schema=OpenAI function schema, method="function_calling", include_raw=False: - ```python - from langchain_mistralai import ChatMistralAI - - oai_schema = { - 'name': 'AnswerWithJustification', - 'description': 'An answer to the user question along with justification for the answer.', - 'parameters': { - 'type': 'object', - 'properties': { - 'answer': {'type': 'string'}, - 'justification': {'description': 'A justification for the answer.', 'type': 'string'} - }, - 'required': ['answer'] - } - - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) - structured_llm = llm.with_structured_output(oai_schema) - - structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - # -> { - # 'answer': 'They weigh the same', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' - # } - ``` + ``` Example: schema=Pydantic class, method="json_mode", include_raw=True: - ```python - from langchain_mistralai import ChatMistralAI - from pydantic import BaseModel + + ```python + from langchain_mistralai import ChatMistralAI + from pydantic import BaseModel - class AnswerWithJustification(BaseModel): - answer: str - justification: str + class AnswerWithJustification(BaseModel): + answer: str + justification: str - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) - structured_llm = llm.with_structured_output( - AnswerWithJustification, method="json_mode", include_raw=True - ) + model = ChatMistralAI(model="mistral-large-latest", temperature=0) + structured_model = model.with_structured_output( + AnswerWithJustification, method="json_mode", include_raw=True + ) - structured_llm.invoke( - "Answer the following question. " - "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" - "What's heavier a pound of bricks or a pound of feathers?" - ) - # -> { - # 'raw': AIMessage(content='{\\n "answer": "They are both the same weight.",\\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." \\n}'), - # 'parsed': AnswerWithJustification(answer='They are both the same weight.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.'), - # 'parsing_error': None - # } - ``` + structured_model.invoke( + "Answer the following question. " + "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" + "What's heavier a pound of bricks or a pound of feathers?" + ) + # -> { + # 'raw': AIMessage(content='{\\n "answer": "They are both the same weight.",\\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." \\n}'), + # 'parsed': AnswerWithJustification(answer='They are both the same weight.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.'), + # 'parsing_error': None + # } + ``` Example: schema=None, method="json_mode", include_raw=True: - ```python - structured_llm = llm.with_structured_output( - method="json_mode", include_raw=True - ) - structured_llm.invoke( - "Answer the following question. 
" - "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" - "What's heavier a pound of bricks or a pound of feathers?" - ) - # -> { - # 'raw': AIMessage(content='{\\n "answer": "They are both the same weight.",\\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." \\n}'), - # 'parsed': { - # 'answer': 'They are both the same weight.', - # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.' - # }, - # 'parsing_error': None - # } - ``` + ```python + structured_model = model.with_structured_output( + method="json_mode", include_raw=True + ) + structured_model.invoke( + "Answer the following question. " + "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" + "What's heavier a pound of bricks or a pound of feathers?" + ) + # -> { + # 'raw': AIMessage(content='{\\n "answer": "They are both the same weight.",\\n "justification": "Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight." \\n}'), + # 'parsed': { + # 'answer': 'They are both the same weight.', + # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The difference lies in the volume and density of the materials, not the weight.' + # }, + # 'parsing_error': None + # } + ``` """ # noqa: E501 _ = kwargs.pop("strict", None) if kwargs: @@ -1075,7 +1156,11 @@ class ChatMistralAI(BaseChatModel): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object.""" + """Get the namespace of the LangChain object. + + Returns: + `["langchain", "chat_models", "mistralai"]` + """ return ["langchain", "chat_models", "mistralai"] diff --git a/libs/partners/mistralai/langchain_mistralai/embeddings.py b/libs/partners/mistralai/langchain_mistralai/embeddings.py index 26093d15c20..7c0ad474c68 100644 --- a/libs/partners/mistralai/langchain_mistralai/embeddings.py +++ b/libs/partners/mistralai/langchain_mistralai/embeddings.py @@ -1,7 +1,7 @@ import asyncio import logging import warnings -from collections.abc import Iterable +from collections.abc import Callable, Iterable import httpx from httpx import Response @@ -50,21 +50,22 @@ class MistralAIEmbeddings(BaseModel, Embeddings): ``` Key init args β€” completion params: - model: str - Name of MistralAI model to use. + model: + Name of `MistralAI` model to use. Key init args β€” client params: - api_key: SecretStr | None + api_key: The API key for the MistralAI API. If not provided, it will be read from the environment variable `MISTRAL_API_KEY`. - max_retries: int + max_concurrent_requests: int + max_retries: The number of times to retry a request if it fails. - timeout: int + timeout: The number of seconds to wait for a response before timing out. - wait_time: int + wait_time: The number of seconds to wait before retrying a request in case of 429 error. - max_concurrent_requests: int + max_concurrent_requests: The maximum number of concurrent requests to make to the Mistral API. See full list of supported init args and their descriptions in the params section. @@ -123,20 +124,27 @@ class MistralAIEmbeddings(BaseModel, Embeddings): # The type for client and async_client is ignored because the type is not # an Optional after the model is initialized and the model_validator # is run. 
- client: httpx.Client = Field(default=None) # type: ignore[assignment] # :meta private: + client: httpx.Client = Field(default=None) # type: ignore[assignment] - async_client: httpx.AsyncClient = Field( # type: ignore[assignment] # :meta private: + async_client: httpx.AsyncClient = Field( # type: ignore[assignment] default=None ) + mistral_api_key: SecretStr = Field( alias="api_key", default_factory=secret_from_env("MISTRAL_API_KEY", default=""), ) + endpoint: str = "https://api.mistral.ai/v1/" - max_retries: int = 5 + + max_retries: int | None = 5 + timeout: int = 120 - wait_time: int = 30 + + wait_time: int | None = 30 + max_concurrent_requests: int = 64 + tokenizer: Tokenizer = Field(default=None) model: str = "mistral-embed" @@ -212,6 +220,18 @@ class MistralAIEmbeddings(BaseModel, Embeddings): if batch: yield batch + def _retry(self, func: Callable) -> Callable: + if self.max_retries is None or self.wait_time is None: + return func + + return retry( + retry=retry_if_exception_type( + (httpx.TimeoutException, httpx.HTTPStatusError) + ), + wait=wait_fixed(self.wait_time), + stop=stop_after_attempt(self.max_retries), + )(func) + def embed_documents(self, texts: list[str]) -> list[list[float]]: """Embed a list of document texts. @@ -225,13 +245,7 @@ class MistralAIEmbeddings(BaseModel, Embeddings): try: batch_responses = [] - @retry( - retry=retry_if_exception_type( - (httpx.TimeoutException, httpx.HTTPStatusError) - ), - wait=wait_fixed(self.wait_time), - stop=stop_after_attempt(self.max_retries), - ) + @self._retry def _embed_batch(batch: list[str]) -> Response: response = self.client.post( url="/embeddings", @@ -266,13 +280,7 @@ class MistralAIEmbeddings(BaseModel, Embeddings): """ try: - @retry( - retry=retry_if_exception_type( - (httpx.TimeoutException, httpx.HTTPStatusError) - ), - wait=wait_fixed(self.wait_time), - stop=stop_after_attempt(self.max_retries), - ) + @self._retry async def _aembed_batch(batch: list[str]) -> Response: response = await self.async_client.post( url="/embeddings", diff --git a/libs/partners/mistralai/pyproject.toml b/libs/partners/mistralai/pyproject.toml index 7b3f93045e1..5221a40ac6a 100644 --- a/libs/partners/mistralai/pyproject.toml +++ b/libs/partners/mistralai/pyproject.toml @@ -3,29 +3,30 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-mistralai" +description = "An integration package connecting Mistral and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.1" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "tokenizers>=0.15.1,<1.0.0", "httpx>=0.25.2,<1.0.0", "httpx-sse>=0.3.1,<1.0.0", "pydantic>=2.0.0,<3.0.0", ] -name = "langchain-mistralai" -version = "1.0.0a1" -description = "An integration package connecting Mistral and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/mistralai" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/mistralai" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-mistralai%22" -docs = "https://reference.langchain.com/python/integrations/langchain_mistralai/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/mistralai" +Documentation = 
"https://reference.langchain.com/python/integrations/langchain_mistralai/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/mistralai" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-mistralai%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ diff --git a/libs/partners/mistralai/tests/integration_tests/test_chat_models.py b/libs/partners/mistralai/tests/integration_tests/test_chat_models.py index a013e3a2bd9..bef96ae3e14 100644 --- a/libs/partners/mistralai/tests/integration_tests/test_chat_models.py +++ b/libs/partners/mistralai/tests/integration_tests/test_chat_models.py @@ -2,33 +2,19 @@ from __future__ import annotations -import json import logging import time from typing import Any import pytest from httpx import ReadTimeout -from langchain_core.messages import ( - AIMessage, - AIMessageChunk, - BaseMessageChunk, - HumanMessage, -) +from langchain_core.messages import AIMessageChunk, BaseMessageChunk from pydantic import BaseModel from typing_extensions import TypedDict from langchain_mistralai.chat_models import ChatMistralAI -def test_stream() -> None: - """Test streaming tokens from ChatMistralAI.""" - llm = ChatMistralAI() - - for token in llm.stream("Hello"): - assert isinstance(token.content, str) - - async def test_astream() -> None: """Test streaming tokens from ChatMistralAI.""" llm = ChatMistralAI() @@ -42,7 +28,9 @@ async def test_astream() -> None: full = token if full is None else full + token if token.usage_metadata is not None: chunks_with_token_counts += 1 - if token.response_metadata: + if token.response_metadata and not set(token.response_metadata.keys()).issubset( + {"model_provider", "output_version"} + ): chunks_with_response_metadata += 1 if chunks_with_token_counts != 1 or chunks_with_response_metadata != 1: msg = ( @@ -63,131 +51,6 @@ async def test_astream() -> None: assert full.response_metadata["model_name"] -async def test_abatch() -> None: - """Test streaming tokens from ChatMistralAI.""" - llm = ChatMistralAI() - - result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"]) - for token in result: - assert isinstance(token.content, str) - - -async def test_abatch_tags() -> None: - """Test batch tokens from ChatMistralAI.""" - llm = ChatMistralAI() - - result = await llm.abatch( - ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]} - ) - for token in result: - assert isinstance(token.content, str) - - -def test_batch() -> None: - """Test batch tokens from ChatMistralAI.""" - llm = ChatMistralAI() - - result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"]) - for token in result: - assert isinstance(token.content, str) - - -async def test_ainvoke() -> None: - """Test invoke tokens from ChatMistralAI.""" - llm = ChatMistralAI() - - result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]}) - assert isinstance(result.content, str) - assert "model_name" in result.response_metadata - - -def test_invoke() -> None: - """Test invoke tokens from ChatMistralAI.""" - llm = ChatMistralAI() - - result = llm.invoke("I'm Pickle Rick", config={"tags": ["foo"]}) - assert isinstance(result.content, str) - - -def test_chat_mistralai_llm_output_contains_model_name() -> None: - """Test llm_output contains model_name.""" - chat = ChatMistralAI(max_tokens=10) - message = HumanMessage(content="Hello") - llm_result = chat.generate([[message]]) - assert 
llm_result.llm_output is not None - assert llm_result.llm_output["model_name"] == chat.model - - -def test_chat_mistralai_streaming_llm_output_contains_model_name() -> None: - """Test llm_output contains model_name.""" - chat = ChatMistralAI(max_tokens=10, streaming=True) - message = HumanMessage(content="Hello") - llm_result = chat.generate([[message]]) - assert llm_result.llm_output is not None - assert llm_result.llm_output["model_name"] == chat.model - - -def test_chat_mistralai_llm_output_contains_token_usage() -> None: - """Test llm_output contains model_name.""" - chat = ChatMistralAI(max_tokens=10) - message = HumanMessage(content="Hello") - llm_result = chat.generate([[message]]) - assert llm_result.llm_output is not None - assert "token_usage" in llm_result.llm_output - token_usage = llm_result.llm_output["token_usage"] - assert "prompt_tokens" in token_usage - assert "completion_tokens" in token_usage - assert "total_tokens" in token_usage - - -def test_chat_mistralai_streaming_llm_output_not_contain_token_usage() -> None: - """Mistral currently doesn't return token usage when streaming.""" - chat = ChatMistralAI(max_tokens=10, streaming=True) - message = HumanMessage(content="Hello") - llm_result = chat.generate([[message]]) - assert llm_result.llm_output is not None - assert "token_usage" in llm_result.llm_output - token_usage = llm_result.llm_output["token_usage"] - assert not token_usage - - -def test_structured_output() -> None: - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) # type: ignore[call-arg] - schema = { - "title": "AnswerWithJustification", - "description": ( - "An answer to the user question along with justification for the answer." - ), - "type": "object", - "properties": { - "answer": {"title": "Answer", "type": "string"}, - "justification": {"title": "Justification", "type": "string"}, - }, - "required": ["answer", "justification"], - } - structured_llm = llm.with_structured_output(schema) - result = structured_llm.invoke( - "What weighs more a pound of bricks or a pound of feathers" - ) - assert isinstance(result, dict) - - -def test_streaming_structured_output() -> None: - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) # type: ignore[call-arg] - - class Person(BaseModel): - name: str - age: int - - structured_llm = llm.with_structured_output(Person) - strm = structured_llm.stream("Erick, 27 years old") - for chunk_num, chunk in enumerate(strm): - assert chunk_num == 0, "should only have one chunk with model" - assert isinstance(chunk, Person) - assert chunk.name == "Erick" - assert chunk.age == 27 - - class Book(BaseModel): name: str authors: list[str] @@ -247,66 +110,6 @@ async def test_structured_output_json_schema_async(schema: Any) -> None: _check_parsed_result(chunk, schema) -def test_tool_call() -> None: - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) # type: ignore[call-arg] - - class Person(BaseModel): - name: str - age: int - - tool_llm = llm.bind_tools([Person]) - - result = tool_llm.invoke("Erick, 27 years old") - assert isinstance(result, AIMessage) - assert len(result.tool_calls) == 1 - tool_call = result.tool_calls[0] - assert tool_call["name"] == "Person" - assert tool_call["args"] == {"name": "Erick", "age": 27} - - -def test_streaming_tool_call() -> None: - llm = ChatMistralAI(model="mistral-large-latest", temperature=0) # type: ignore[call-arg] - - class Person(BaseModel): - name: str - age: int - - tool_llm = llm.bind_tools([Person]) - - # where it calls the tool - strm = 
tool_llm.stream("Erick, 27 years old") - - additional_kwargs = None - for chunk in strm: - assert isinstance(chunk, AIMessageChunk) - assert chunk.content == "" - additional_kwargs = chunk.additional_kwargs - - assert additional_kwargs is not None - assert "tool_calls" in additional_kwargs - assert len(additional_kwargs["tool_calls"]) == 1 - assert additional_kwargs["tool_calls"][0]["function"]["name"] == "Person" - assert json.loads(additional_kwargs["tool_calls"][0]["function"]["arguments"]) == { - "name": "Erick", - "age": 27, - } - - assert isinstance(chunk, AIMessageChunk) - assert len(chunk.tool_call_chunks) == 1 - tool_call_chunk = chunk.tool_call_chunks[0] - assert tool_call_chunk["name"] == "Person" - assert tool_call_chunk["args"] == '{"name": "Erick", "age": 27}' - - # where it doesn't call the tool - strm = tool_llm.stream("What is 2+2?") - acc: Any = None - for chunk in strm: - assert isinstance(chunk, AIMessageChunk) - acc = chunk if acc is None else acc + chunk - assert acc.content != "" - assert "tool_calls" not in acc.additional_kwargs - - def test_retry_parameters(caplog: pytest.LogCaptureFixture) -> None: """Test that retry parameters are honored in ChatMistralAI.""" # Create a model with intentionally short timeout and multiple retries @@ -342,3 +145,51 @@ def test_retry_parameters(caplog: pytest.LogCaptureFixture) -> None: except Exception: logger.exception("Unexpected exception") raise + + +def test_reasoning() -> None: + model = ChatMistralAI(model="magistral-medium-latest") # type: ignore[call-arg] + input_message = { + "role": "user", + "content": "Hello, my name is Bob.", + } + full: AIMessageChunk | None = None + for chunk in model.stream([input_message]): + assert isinstance(chunk, AIMessageChunk) + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + thinking_blocks = 0 + for i, block in enumerate(full.content): + if isinstance(block, dict) and block.get("type") == "thinking": + thinking_blocks += 1 + reasoning_block = full.content_blocks[i] + assert reasoning_block["type"] == "reasoning" + assert isinstance(reasoning_block.get("reasoning"), str) + assert thinking_blocks > 0 + + next_message = {"role": "user", "content": "What is my name?"} + _ = model.invoke([input_message, full, next_message]) + + +def test_reasoning_v1() -> None: + model = ChatMistralAI(model="magistral-medium-latest", output_version="v1") # type: ignore[call-arg] + input_message = { + "role": "user", + "content": "Hello, my name is Bob.", + } + full: AIMessageChunk | None = None + chunks = [] + for chunk in model.stream([input_message]): + assert isinstance(chunk, AIMessageChunk) + full = chunk if full is None else full + chunk + chunks.append(chunk) + assert isinstance(full, AIMessageChunk) + reasoning_blocks = 0 + for block in full.content: + if isinstance(block, dict) and block.get("type") == "reasoning": + reasoning_blocks += 1 + assert isinstance(block.get("reasoning"), str) + assert reasoning_blocks > 0 + + next_message = {"role": "user", "content": "What is my name?"} + _ = model.invoke([input_message, full, next_message]) diff --git a/libs/partners/mistralai/tests/integration_tests/test_embeddings.py b/libs/partners/mistralai/tests/integration_tests/test_embeddings.py index 3ef91728e2f..b5e0f2787cc 100644 --- a/libs/partners/mistralai/tests/integration_tests/test_embeddings.py +++ b/libs/partners/mistralai/tests/integration_tests/test_embeddings.py @@ -35,7 +35,7 @@ async def test_mistralai_embedding_documents_async() -> None: assert 
len(output[0]) == 1024 -async def test_mistralai_embedding_documents_http_error_async() -> None: +async def test_mistralai_embedding_documents_tenacity_error_async() -> None: """Test MistralAI embeddings for documents.""" documents = ["foo bar", "test document"] embedding = MistralAIEmbeddings(max_retries=0) @@ -50,6 +50,21 @@ async def test_mistralai_embedding_documents_http_error_async() -> None: await embedding.aembed_documents(documents) +async def test_mistralai_embedding_documents_http_error_async() -> None: + """Test MistralAI embeddings for documents.""" + documents = ["foo bar", "test document"] + embedding = MistralAIEmbeddings(max_retries=None) + mock_response = httpx.Response( + status_code=400, + request=httpx.Request("POST", url=embedding.async_client.base_url), + ) + with ( + patch.object(embedding.async_client, "post", return_value=mock_response), + pytest.raises(httpx.HTTPStatusError), + ): + await embedding.aembed_documents(documents) + + async def test_mistralai_embedding_query_async() -> None: """Test MistralAI embeddings for query.""" document = "foo bar" diff --git a/libs/partners/mistralai/tests/unit_tests/test_chat_models.py b/libs/partners/mistralai/tests/unit_tests/test_chat_models.py index 7851275ed94..724531694b8 100644 --- a/libs/partners/mistralai/tests/unit_tests/test_chat_models.py +++ b/libs/partners/mistralai/tests/unit_tests/test_chat_models.py @@ -188,6 +188,7 @@ def test__convert_dict_to_message_tool_call() -> None: type="tool_call", ) ], + response_metadata={"model_provider": "mistralai"}, ) assert result == expected_output assert _convert_message_to_mistral_chat_message(expected_output) == message @@ -218,7 +219,7 @@ def test__convert_dict_to_message_tool_call() -> None: InvalidToolCall( name="GenerateUsername", args="oops", - error="Function GenerateUsername arguments:\n\noops\n\nare not valid JSON. Received JSONDecodeError Expecting value: line 1 column 1 (char 0)\nFor troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/OUTPUT_PARSING_FAILURE ", # noqa: E501 + error="Function GenerateUsername arguments:\n\noops\n\nare not valid JSON. Received JSONDecodeError Expecting value: line 1 column 1 (char 0)\nFor troubleshooting, visit: https://docs.langchain.com/oss/python/langchain/errors/OUTPUT_PARSING_FAILURE ", # noqa: E501 id="ssAbar4Dr", type="invalid_tool_call", ), @@ -231,6 +232,7 @@ def test__convert_dict_to_message_tool_call() -> None: type="tool_call", ), ], + response_metadata={"model_provider": "mistralai"}, ) assert result == expected_output assert _convert_message_to_mistral_chat_message(expected_output) == message @@ -315,3 +317,36 @@ def test_retry_with_failure_then_success() -> None: result = chat.invoke("Hello") assert result.content == "Hello!" assert call_count == 2, f"Expected 2 calls, but got {call_count}" + + +def test_no_duplicate_tool_calls_when_multiple_tools() -> None: + """ + Tests whether the conversion of an AIMessage with more than one tool call + to a Mistral assistant message correctly returns each tool call exactly + once in the final payload. + + The current implementation uses a faulty for loop which produces N*N entries in the + final tool_calls array of the payload (and thus duplicates tool call ids). 
+ """ + msg = AIMessage( + content="", # content should be blank when tool_calls are present + tool_calls=[ + ToolCall(name="tool_a", args={"x": 1}, id="id_a", type="tool_call"), + ToolCall(name="tool_b", args={"y": 2}, id="id_b", type="tool_call"), + ], + response_metadata={"model_provider": "mistralai"}, + ) + + mistral_msg = _convert_message_to_mistral_chat_message(msg) + + assert mistral_msg["role"] == "assistant" + assert "tool_calls" in mistral_msg, "Expected tool_calls to be present." + + tool_calls = mistral_msg["tool_calls"] + # With the bug, this would be 4 (2x2); we expect exactly 2 entries. + assert len(tool_calls) == 2, f"Expected 2 tool calls, got {len(tool_calls)}" + + # Ensure there are no duplicate ids + ids = [tc.get("id") for tc in tool_calls if isinstance(tc, dict)] + assert len(ids) == 2 + assert len(set(ids)) == 2, f"Duplicate tool call IDs found: {ids}" diff --git a/libs/partners/mistralai/uv.lock b/libs/partners/mistralai/uv.lock index aba47d37152..431588be339 100644 --- a/libs/partners/mistralai/uv.lock +++ b/libs/partners/mistralai/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10.0, <4.0.0" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -203,7 +203,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ @@ -349,7 +349,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a6" +version = "1.0.1" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -407,7 +407,7 @@ typing = [ [[package]] name = "langchain-mistralai" -version = "1.0.0a1" +version = "1.0.1" source = { editable = "." 
} dependencies = [ { name = "httpx" }, @@ -463,7 +463,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1067,7 +1067,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1075,96 +1075,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = 
"2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = 
"sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { 
url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 
2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { 
url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = 
"sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = 
"2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/partners/nomic/README.md b/libs/partners/nomic/README.md index 9fc4e3a4cd7..47d36b6d956 100644 --- a/libs/partners/nomic/README.md +++ b/libs/partners/nomic/README.md @@ -1,5 +1,22 @@ # langchain-nomic +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-nomic?label=%20)](https://pypi.org/project/langchain-nomic/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-nomic)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-nomic)](https://pypistats.org/packages/langchain-nomic) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-nomic +``` + +## πŸ€” What is this? + This package contains the LangChain integration with Nomic +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/nomic) for more details. diff --git a/libs/partners/nomic/langchain_nomic/embeddings.py b/libs/partners/nomic/langchain_nomic/embeddings.py index 5e1253c1e92..775f8fad85b 100644 --- a/libs/partners/nomic/langchain_nomic/embeddings.py +++ b/libs/partners/nomic/langchain_nomic/embeddings.py @@ -11,7 +11,7 @@ from nomic import embed class NomicEmbeddings(Embeddings): - """NomicEmbeddings embedding model. + """`NomicEmbeddings` embedding model. Example: ```python @@ -63,20 +63,25 @@ class NomicEmbeddings(Embeddings): device: str | None = None, vision_model: str | None = None, ): - """Initialize NomicEmbeddings model. + """Initialize `NomicEmbeddings` model. Args: - model: model name - nomic_api_key: optionally, set the Nomic API key. Uses the `NOMIC_API_KEY` + model: Model name + nomic_api_key: Optionally, set the Nomic API key. Uses the `NOMIC_API_KEY` environment variable by default. dimensionality: The embedding dimension, for use with Matryoshka-capable models. Defaults to full-size. 
inference_mode: How to generate embeddings. One of `'remote'`, `'local'` - (Embed4All), or `'dynamic'` (automatic). Defaults to `'remote'`. + (Embed4All), or `'dynamic'` (automatic). device: The device to use for local embeddings. Choices include `'cpu'`, `'gpu'`, `'nvidia'`, `'amd'`, or a specific device name. See the docstring for `GPT4All.__init__` for more info. - Typically defaults to `'cpu'`. Do not use on macOS. + + Typically defaults to `'cpu'`. + + !!! warning + + Do not use on macOS. vision_model: The vision model to use for image embeddings. """ @@ -93,8 +98,8 @@ class NomicEmbeddings(Embeddings): """Embed texts. Args: - texts: list of texts to embed - task_type: the task type to use when embedding. One of `'search_query'`, + texts: List of texts to embed + task_type: The task type to use when embedding. One of `'search_query'`, `'search_document'`, `'classification'`, `'clustering'` """ @@ -112,7 +117,7 @@ class NomicEmbeddings(Embeddings): """Embed search docs. Args: - texts: list of texts to embed as documents + texts: List of texts to embed as documents """ return self.embed( @@ -124,7 +129,7 @@ class NomicEmbeddings(Embeddings): """Embed query text. Args: - text: query text + text: Query text """ return self.embed( @@ -136,7 +141,7 @@ class NomicEmbeddings(Embeddings): """Embed images. Args: - uris: list of image URIs to embed + uris: List of image URIs to embed """ return embed.image( images=uris, diff --git a/libs/partners/nomic/pyproject.toml b/libs/partners/nomic/pyproject.toml index b344c896071..5ec7b09b936 100644 --- a/libs/partners/nomic/pyproject.toml +++ b/libs/partners/nomic/pyproject.toml @@ -7,23 +7,23 @@ authors = [] license = { text = "MIT" } requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "nomic>=3.5.3,<4.0.0", - "pillow>=10.3.0,<11.0.0", + "pillow>=10.3.0,<12.0.0", ] name = "langchain-nomic" -version = "1.0.0a1" +version = "1.0.0" description = "An integration package connecting Nomic and LangChain" readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/nomic" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/nomic" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-nomic%22" -docs = "https://reference.langchain.com/python/integrations/langchain_nomic/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/nomic" +Documentation = "https://reference.langchain.com/python/integrations/langchain_nomic/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/nomic" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-nomic%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ @@ -40,7 +40,7 @@ test = [ test_integration = [] lint = ["ruff>=0.13.1,<0.14.0"] typing = [ - "mypy>=0.991.0,<1.0.0", + "mypy>=1.18.1,<1.19.0", "langchain-core" ] dev = ["langchain-core"] diff --git a/libs/partners/nomic/uv.lock b/libs/partners/nomic/uv.lock index 0fae4e2f419..bda56e3ddad 100644 --- a/libs/partners/nomic/uv.lock +++ b/libs/partners/nomic/uv.lock @@ -226,7 +226,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } 
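The `langchain_nomic/embeddings.py` hunk above documents the `NomicEmbeddings` constructor arguments and the embedding methods. For orientation only, here is a minimal usage sketch against that documented signature; the model name, dimensionality value, and reliance on the `NOMIC_API_KEY` environment variable are illustrative assumptions and are not part of this diff:

```python
# Illustrative sketch only -- not part of the diff.
# Assumes `pip install langchain-nomic` and that NOMIC_API_KEY is set,
# as described in the docstring changes above.
from langchain_nomic import NomicEmbeddings

embeddings = NomicEmbeddings(
    model="nomic-embed-text-v1.5",  # assumed model name, for illustration
    dimensionality=256,             # optional; Matryoshka-capable models only
    inference_mode="remote",        # or "local" (Embed4All) / "dynamic"
)

# embed_documents / embed_query delegate to embed(); the embed() docstring
# lists task types 'search_query', 'search_document', 'classification',
# and 'clustering'.
doc_vectors = embeddings.embed_documents(["hello world", "goodbye world"])
query_vector = embeddings.embed_query("greeting")
print(len(doc_vectors), len(query_vector))
```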
dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ @@ -362,7 +362,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a6" +version = "1.0.1" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -420,7 +420,7 @@ typing = [ [[package]] name = "langchain-nomic" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." } dependencies = [ { name = "langchain-core" }, @@ -455,7 +455,7 @@ typing = [ requires-dist = [ { name = "langchain-core", editable = "../../core" }, { name = "nomic", specifier = ">=3.5.3,<4.0.0" }, - { name = "pillow", specifier = ">=10.3.0,<11.0.0" }, + { name = "pillow", specifier = ">=10.3.0,<12.0.0" }, ] [package.metadata.requires-dev] @@ -475,12 +475,12 @@ test = [ test-integration = [] typing = [ { name = "langchain-core", editable = "../../core" }, - { name = "mypy", specifier = ">=0.991.0,<1.0.0" }, + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, ] [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -679,28 +679,47 @@ wheels = [ [[package]] name = "mypy" -version = "0.991" +version = "1.18.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, + { name = "pathspec" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/5c/fbe112ca73d4c6a9e65336f48099c60800514d8949b4129c093a84a28dc8/mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06", size = 2688198, upload-time = "2022-11-14T16:59:24.952Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/d0/81d47bffc80d0cff84174aab266adc3401e735e13c5613418e825c146986/mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab", size = 18805560, upload-time = "2022-11-14T16:59:12.453Z" }, - { url = "https://files.pythonhosted.org/packages/d7/f4/dcab9f3c5ed410caca1b9374dbb2b2caa778d225e32f174e266e20291edf/mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d", size = 11117673, upload-time = "2022-11-14T16:58:46.973Z" }, - { url = "https://files.pythonhosted.org/packages/87/ec/62fd00fa5d8ead3ecafed3eb99ee805911f41b11536c5940df1bcb2c845d/mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6", size = 10023879, upload-time = "2022-11-14T16:59:04.238Z" }, - { url = "https://files.pythonhosted.org/packages/39/05/7a7d58afc7d00e819e553ad2485a29141e14575e3b0c43b9da6f869ede4c/mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb", size = 18209901, upload-time = "2022-11-14T16:59:20.559Z" }, - { url = "https://files.pythonhosted.org/packages/80/23/76e56e004acca691b4da4086a8c38bd67b7ae73536848dcab76cfed5c188/mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305", size = 19680479, upload-time = "2022-11-14T16:58:18.184Z" }, - { url = "https://files.pythonhosted.org/packages/f3/1d/cc67a674f1cd7f1c10619487a4245185f6f8f14cbd685b60709318e9ac27/mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c", size = 8670519, upload-time = "2022-11-14T16:59:08.595Z" }, - { url = "https://files.pythonhosted.org/packages/b8/ab/aa2e02fce8ee8885fe98ee2a0549290e9de5caa28febc0cf243bfab020e7/mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372", size = 18600640, upload-time = "2022-11-14T16:58:42.234Z" }, - { url = "https://files.pythonhosted.org/packages/28/9c/e1805f2fea93a92671f33b00dd577119f37e4a8b859d6f6ea62d3e9129fa/mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f", size = 11004105, upload-time = "2022-11-14T16:58:13.53Z" }, - { url = "https://files.pythonhosted.org/packages/df/bb/3cf400e05e30939a0fc58b34e0662d8abe8e206464665065b56cf2ca9a62/mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33", size = 9939717, upload-time = "2022-11-14T16:58:50.894Z" }, - { url = "https://files.pythonhosted.org/packages/14/05/5a4206e269268f4aecb1096bf2375a231c959987ccf3e31313221b8bc153/mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05", size = 18068884, upload-time = "2022-11-14T16:58:24.629Z" }, - { url = "https://files.pythonhosted.org/packages/4b/98/125e5d14222de8e92f44314f8df21a9c351b531b37c551526acd67486a7d/mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad", size = 19451243, upload-time = "2022-11-14T16:59:16.468Z" }, - { url = "https://files.pythonhosted.org/packages/89/76/7159258fdbf26a5ceef100b80a82d2f79b9066725a5daeb6383a8f773910/mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297", size = 8666646, upload-time = "2022-11-14T16:58:09.231Z" }, - { url = "https://files.pythonhosted.org/packages/e7/a1/c503a15ad69ff133a76c159b8287f0eadc1f521d9796bf81f935886c98f6/mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb", size = 2307767, upload-time = "2022-11-14T16:56:33.004Z" }, + { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, + { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, + { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = 
"2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", 
size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, ] [[package]] @@ -1041,62 +1060,114 @@ wheels = [ ] [[package]] -name = "pillow" -version = "10.4.0" +name = "pathspec" +version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059, upload-time = "2024-07-01T09:48:43.583Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/69/a31cccd538ca0b5272be2a38347f8839b97a14be104ea08b0db92f749c74/pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e", size = 3509271, upload-time = "2024-07-01T09:45:22.07Z" }, - { url = "https://files.pythonhosted.org/packages/9a/9e/4143b907be8ea0bce215f2ae4f7480027473f8b61fcedfda9d851082a5d2/pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d", size = 3375658, upload-time = "2024-07-01T09:45:25.292Z" }, - { url = "https://files.pythonhosted.org/packages/8a/25/1fc45761955f9359b1169aa75e241551e74ac01a09f487adaaf4c3472d11/pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856", size = 4332075, upload-time = "2024-07-01T09:45:27.94Z" }, - { url = "https://files.pythonhosted.org/packages/5e/dd/425b95d0151e1d6c951f45051112394f130df3da67363b6bc75dc4c27aba/pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f", size = 4444808, upload-time = "2024-07-01T09:45:30.305Z" }, - { url = "https://files.pythonhosted.org/packages/b1/84/9a15cc5726cbbfe7f9f90bfb11f5d028586595907cd093815ca6644932e3/pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b", size = 4356290, upload-time = "2024-07-01T09:45:32.868Z" }, - { url = "https://files.pythonhosted.org/packages/b5/5b/6651c288b08df3b8c1e2f8c1152201e0b25d240e22ddade0f1e242fc9fa0/pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc", size = 4525163, upload-time = "2024-07-01T09:45:35.279Z" }, - { url = "https://files.pythonhosted.org/packages/07/8b/34854bf11a83c248505c8cb0fcf8d3d0b459a2246c8809b967963b6b12ae/pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e", size = 4463100, upload-time = "2024-07-01T09:45:37.74Z" }, - { url = "https://files.pythonhosted.org/packages/78/63/0632aee4e82476d9cbe5200c0cdf9ba41ee04ed77887432845264d81116d/pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46", size = 4592880, upload-time = "2024-07-01T09:45:39.89Z" }, - { url = "https://files.pythonhosted.org/packages/df/56/b8663d7520671b4398b9d97e1ed9f583d4afcbefbda3c6188325e8c297bd/pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984", size = 2235218, upload-time = "2024-07-01T09:45:42.771Z" }, - { url = "https://files.pythonhosted.org/packages/f4/72/0203e94a91ddb4a9d5238434ae6c1ca10e610e8487036132ea9bf806ca2a/pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141", size = 2554487, upload-time = "2024-07-01T09:45:45.176Z" }, - { url = "https://files.pythonhosted.org/packages/bd/52/7e7e93d7a6e4290543f17dc6f7d3af4bd0b3dd9926e2e8a35ac2282bc5f4/pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1", size = 2243219, upload-time = "2024-07-01T09:45:47.274Z" }, - { url = "https://files.pythonhosted.org/packages/a7/62/c9449f9c3043c37f73e7487ec4ef0c03eb9c9afc91a92b977a67b3c0bbc5/pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c", size = 3509265, upload-time = "2024-07-01T09:45:49.812Z" }, - { url = "https://files.pythonhosted.org/packages/f4/5f/491dafc7bbf5a3cc1845dc0430872e8096eb9e2b6f8161509d124594ec2d/pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be", size = 3375655, 
upload-time = "2024-07-01T09:45:52.462Z" }, - { url = "https://files.pythonhosted.org/packages/73/d5/c4011a76f4207a3c151134cd22a1415741e42fa5ddecec7c0182887deb3d/pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3", size = 4340304, upload-time = "2024-07-01T09:45:55.006Z" }, - { url = "https://files.pythonhosted.org/packages/ac/10/c67e20445a707f7a610699bba4fe050583b688d8cd2d202572b257f46600/pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6", size = 4452804, upload-time = "2024-07-01T09:45:58.437Z" }, - { url = "https://files.pythonhosted.org/packages/a9/83/6523837906d1da2b269dee787e31df3b0acb12e3d08f024965a3e7f64665/pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe", size = 4365126, upload-time = "2024-07-01T09:46:00.713Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e5/8c68ff608a4203085158cff5cc2a3c534ec384536d9438c405ed6370d080/pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319", size = 4533541, upload-time = "2024-07-01T09:46:03.235Z" }, - { url = "https://files.pythonhosted.org/packages/f4/7c/01b8dbdca5bc6785573f4cee96e2358b0918b7b2c7b60d8b6f3abf87a070/pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d", size = 4471616, upload-time = "2024-07-01T09:46:05.356Z" }, - { url = "https://files.pythonhosted.org/packages/c8/57/2899b82394a35a0fbfd352e290945440e3b3785655a03365c0ca8279f351/pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", size = 4600802, upload-time = "2024-07-01T09:46:08.145Z" }, - { url = "https://files.pythonhosted.org/packages/4d/d7/a44f193d4c26e58ee5d2d9db3d4854b2cfb5b5e08d360a5e03fe987c0086/pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496", size = 2235213, upload-time = "2024-07-01T09:46:10.211Z" }, - { url = "https://files.pythonhosted.org/packages/c1/d0/5866318eec2b801cdb8c82abf190c8343d8a1cd8bf5a0c17444a6f268291/pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91", size = 2554498, upload-time = "2024-07-01T09:46:12.685Z" }, - { url = "https://files.pythonhosted.org/packages/d4/c8/310ac16ac2b97e902d9eb438688de0d961660a87703ad1561fd3dfbd2aa0/pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22", size = 2243219, upload-time = "2024-07-01T09:46:14.83Z" }, - { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350, upload-time = "2024-07-01T09:46:17.177Z" }, - { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980, upload-time = "2024-07-01T09:46:19.169Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799, upload-time = "2024-07-01T09:46:21.883Z" }, - { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973, upload-time = "2024-07-01T09:46:24.321Z" }, - { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054, upload-time = "2024-07-01T09:46:26.825Z" }, - { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484, upload-time = "2024-07-01T09:46:29.355Z" }, - { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375, upload-time = "2024-07-01T09:46:31.756Z" }, - { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773, upload-time = "2024-07-01T09:46:33.73Z" }, - { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690, upload-time = "2024-07-01T09:46:36.587Z" }, - { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951, upload-time = "2024-07-01T09:46:38.777Z" }, - { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427, upload-time = "2024-07-01T09:46:43.15Z" }, - { url = "https://files.pythonhosted.org/packages/c3/00/706cebe7c2c12a6318aabe5d354836f54adff7156fd9e1bd6c89f4ba0e98/pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", size = 3525685, upload-time = "2024-07-01T09:46:45.194Z" }, - { url = "https://files.pythonhosted.org/packages/cf/76/f658cbfa49405e5ecbfb9ba42d07074ad9792031267e782d409fd8fe7c69/pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", size = 3374883, upload-time = "2024-07-01T09:46:47.331Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/2b/99c28c4379a85e65378211971c0b430d9c7234b1ec4d59b2668f6299e011/pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", size = 4339837, upload-time = "2024-07-01T09:46:49.647Z" }, - { url = "https://files.pythonhosted.org/packages/f1/74/b1ec314f624c0c43711fdf0d8076f82d9d802afd58f1d62c2a86878e8615/pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", size = 4455562, upload-time = "2024-07-01T09:46:51.811Z" }, - { url = "https://files.pythonhosted.org/packages/4a/2a/4b04157cb7b9c74372fa867096a1607e6fedad93a44deeff553ccd307868/pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", size = 4366761, upload-time = "2024-07-01T09:46:53.961Z" }, - { url = "https://files.pythonhosted.org/packages/ac/7b/8f1d815c1a6a268fe90481232c98dd0e5fa8c75e341a75f060037bd5ceae/pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", size = 4536767, upload-time = "2024-07-01T09:46:56.664Z" }, - { url = "https://files.pythonhosted.org/packages/e5/77/05fa64d1f45d12c22c314e7b97398ffb28ef2813a485465017b7978b3ce7/pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", size = 4477989, upload-time = "2024-07-01T09:46:58.977Z" }, - { url = "https://files.pythonhosted.org/packages/12/63/b0397cfc2caae05c3fb2f4ed1b4fc4fc878f0243510a7a6034ca59726494/pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", size = 4610255, upload-time = "2024-07-01T09:47:01.189Z" }, - { url = "https://files.pythonhosted.org/packages/7b/f9/cfaa5082ca9bc4a6de66ffe1c12c2d90bf09c309a5f52b27759a596900e7/pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", size = 2235603, upload-time = "2024-07-01T09:47:03.918Z" }, - { url = "https://files.pythonhosted.org/packages/01/6a/30ff0eef6e0c0e71e55ded56a38d4859bf9d3634a94a88743897b5f96936/pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", size = 2554972, upload-time = "2024-07-01T09:47:06.152Z" }, - { url = "https://files.pythonhosted.org/packages/48/2c/2e0a52890f269435eee38b21c8218e102c621fe8d8df8b9dd06fabf879ba/pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", size = 2243375, upload-time = "2024-07-01T09:47:09.065Z" }, - { url = "https://files.pythonhosted.org/packages/38/30/095d4f55f3a053392f75e2eae45eba3228452783bab3d9a920b951ac495c/pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4", size = 3493889, upload-time = "2024-07-01T09:48:04.815Z" }, - { url = "https://files.pythonhosted.org/packages/f3/e8/4ff79788803a5fcd5dc35efdc9386af153569853767bff74540725b45863/pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da", size = 3346160, upload-time = "2024-07-01T09:48:07.206Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/ac/4184edd511b14f760c73f5bb8a5d6fd85c591c8aff7c2229677a355c4179/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026", size = 3435020, upload-time = "2024-07-01T09:48:09.66Z" }, - { url = "https://files.pythonhosted.org/packages/da/21/1749cd09160149c0a246a81d646e05f35041619ce76f6493d6a96e8d1103/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e", size = 3490539, upload-time = "2024-07-01T09:48:12.529Z" }, - { url = "https://files.pythonhosted.org/packages/b6/f5/f71fe1888b96083b3f6dfa0709101f61fc9e972c0c8d04e9d93ccef2a045/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5", size = 3476125, upload-time = "2024-07-01T09:48:14.891Z" }, - { url = "https://files.pythonhosted.org/packages/96/b9/c0362c54290a31866c3526848583a2f45a535aa9d725fd31e25d318c805f/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885", size = 3579373, upload-time = "2024-07-01T09:48:17.601Z" }, - { url = "https://files.pythonhosted.org/packages/52/3b/ce7a01026a7cf46e5452afa86f97a5e88ca97f562cafa76570178ab56d8d/pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", size = 2554661, upload-time = "2024-07-01T09:48:20.293Z" }, + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pillow" +version = "11.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860", size = 5316554, upload-time = "2025-07-01T09:13:39.342Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad", size = 4686548, upload-time = "2025-07-01T09:13:41.835Z" }, + { url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0", size = 5859742, upload-time = "2025-07-03T13:09:47.439Z" }, + { url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b", size = 7633087, upload-time = "2025-07-03T13:09:51.796Z" }, + { url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50", size = 5963350, upload-time = "2025-07-01T09:13:43.865Z" }, + { url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae", size = 6631840, upload-time = "2025-07-01T09:13:46.161Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9", size = 6074005, upload-time = "2025-07-01T09:13:47.829Z" }, + { url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e", size = 6708372, upload-time = "2025-07-01T09:13:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6", size = 6277090, upload-time = "2025-07-01T09:13:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f", size = 6985988, upload-time = "2025-07-01T09:13:55.699Z" }, + { url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f", size = 2422899, upload-time = "2025-07-01T09:13:57.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, + { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, + { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, + { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, + { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, + { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516, upload-time = "2025-07-01T09:14:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768, upload-time = "2025-07-01T09:14:11.921Z" }, + { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055, upload-time = "2025-07-01T09:14:13.623Z" }, + { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, + { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, + { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, + { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, + { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, + { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, + { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, + { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, + { url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, + { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, + { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", 
size = 5278474, upload-time = "2025-07-01T09:14:41.843Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038, upload-time = "2025-07-01T09:14:44.008Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407, upload-time = "2025-07-03T13:10:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094, upload-time = "2025-07-03T13:10:21.857Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503, upload-time = "2025-07-01T09:14:45.698Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574, upload-time = "2025-07-01T09:14:47.415Z" }, + { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060, upload-time = "2025-07-01T09:14:49.636Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407, upload-time = "2025-07-01T09:14:51.962Z" }, + { url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841, upload-time = "2025-07-01T09:14:54.142Z" }, + { url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450, upload-time = "2025-07-01T09:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055, upload-time = "2025-07-01T09:14:58.072Z" }, + { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110, upload-time = "2025-07-01T09:14:59.79Z" 
}, + { url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547, upload-time = "2025-07-01T09:15:01.648Z" }, + { url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554, upload-time = "2025-07-03T13:10:27.018Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132, upload-time = "2025-07-03T13:10:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001, upload-time = "2025-07-01T09:15:03.365Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814, upload-time = "2025-07-01T09:15:05.655Z" }, + { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124, upload-time = "2025-07-01T09:15:07.358Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186, upload-time = "2025-07-01T09:15:09.317Z" }, + { url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546, upload-time = "2025-07-01T09:15:11.311Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102, upload-time = "2025-07-01T09:15:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803, upload-time = "2025-07-01T09:15:15.695Z" }, + { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520, upload-time = "2025-07-01T09:15:17.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116, upload-time = "2025-07-01T09:15:19.423Z" }, + { url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597, upload-time = "2025-07-03T13:10:38.404Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246, upload-time = "2025-07-03T13:10:44.987Z" }, + { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336, upload-time = "2025-07-01T09:15:21.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699, upload-time = "2025-07-01T09:15:23.186Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789, upload-time = "2025-07-01T09:15:25.1Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386, upload-time = "2025-07-01T09:15:27.378Z" }, + { url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911, upload-time = "2025-07-01T09:15:29.294Z" }, + { url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383, upload-time = "2025-07-01T09:15:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385, upload-time = "2025-07-01T09:15:33.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129, upload-time = "2025-07-01T09:15:35.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580, upload-time = "2025-07-01T09:15:37.114Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860, upload-time = "2025-07-03T13:10:50.248Z" }, + { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694, upload-time = "2025-07-03T13:10:56.432Z" }, + { url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888, upload-time = "2025-07-01T09:15:39.436Z" }, + { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330, upload-time = "2025-07-01T09:15:41.269Z" }, + { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089, upload-time = "2025-07-01T09:15:43.13Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206, upload-time = "2025-07-01T09:15:44.937Z" }, + { url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370, upload-time = "2025-07-01T09:15:46.673Z" }, + { url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500, upload-time = "2025-07-01T09:15:48.512Z" }, + { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967", size = 5270556, upload-time = "2025-07-01T09:16:09.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe", size = 4654625, upload-time = "2025-07-01T09:16:11.913Z" }, + { url = "https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c", size = 4874207, upload-time = "2025-07-03T13:11:10.201Z" }, + { url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25", size = 6583939, upload-time = "2025-07-03T13:11:15.68Z" }, + { url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27", size = 4957166, upload-time = "2025-07-01T09:16:13.74Z" }, + { url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a", size = 5581482, upload-time = "2025-07-01T09:16:16.107Z" }, + { url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f", size = 6984596, upload-time = "2025-07-01T09:16:18.07Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, + { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, + { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, + { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, + { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, ] [[package]] @@ -1208,45 +1279,59 @@ wheels = [ [[package]] name = "pyarrow" -version = "21.0.0" +version = "22.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz", hash = "sha256:5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc", size = 1133487, upload-time = "2025-07-18T00:57:31.761Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/d9/110de31880016e2afc52d8580b397dbe47615defbf09ca8cf55f56c62165/pyarrow-21.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e563271e2c5ff4d4a4cbeb2c83d5cf0d4938b891518e676025f7268c6fe5fe26", size = 31196837, upload-time = "2025-07-18T00:54:34.755Z" }, - { url = "https://files.pythonhosted.org/packages/df/5f/c1c1997613abf24fceb087e79432d24c19bc6f7259cab57c2c8e5e545fab/pyarrow-21.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fee33b0ca46f4c85443d6c450357101e47d53e6c3f008d658c27a2d020d44c79", size = 32659470, upload-time = "2025-07-18T00:54:38.329Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ed/b1589a777816ee33ba123ba1e4f8f02243a844fed0deec97bde9fb21a5cf/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7be45519b830f7c24b21d630a31d48bcebfd5d4d7f9d3bdb49da9cdf6d764edb", size = 41055619, upload-time = "2025-07-18T00:54:42.172Z" }, - { url = "https://files.pythonhosted.org/packages/44/28/b6672962639e85dc0ac36f71ab3a8f5f38e01b51343d7aa372a6b56fa3f3/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:26bfd95f6bff443ceae63c65dc7e048670b7e98bc892210acba7e4995d3d4b51", size = 42733488, upload-time = "2025-07-18T00:54:47.132Z" }, - { url = "https://files.pythonhosted.org/packages/f8/cc/de02c3614874b9089c94eac093f90ca5dfa6d5afe45de3ba847fd950fdf1/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bd04ec08f7f8bd113c55868bd3fc442a9db67c27af098c5f814a3091e71cc61a", size = 43329159, upload-time = "2025-07-18T00:54:51.686Z" }, - { url = "https://files.pythonhosted.org/packages/a6/3e/99473332ac40278f196e105ce30b79ab8affab12f6194802f2593d6b0be2/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9b0b14b49ac10654332a805aedfc0147fb3469cbf8ea951b3d040dab12372594", size = 45050567, upload-time = "2025-07-18T00:54:56.679Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/f5/c372ef60593d713e8bfbb7e0c743501605f0ad00719146dc075faf11172b/pyarrow-21.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9d9f8bcb4c3be7738add259738abdeddc363de1b80e3310e04067aa1ca596634", size = 26217959, upload-time = "2025-07-18T00:55:00.482Z" }, - { url = "https://files.pythonhosted.org/packages/94/dc/80564a3071a57c20b7c32575e4a0120e8a330ef487c319b122942d665960/pyarrow-21.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c077f48aab61738c237802836fc3844f85409a46015635198761b0d6a688f87b", size = 31243234, upload-time = "2025-07-18T00:55:03.812Z" }, - { url = "https://files.pythonhosted.org/packages/ea/cc/3b51cb2db26fe535d14f74cab4c79b191ed9a8cd4cbba45e2379b5ca2746/pyarrow-21.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:689f448066781856237eca8d1975b98cace19b8dd2ab6145bf49475478bcaa10", size = 32714370, upload-time = "2025-07-18T00:55:07.495Z" }, - { url = "https://files.pythonhosted.org/packages/24/11/a4431f36d5ad7d83b87146f515c063e4d07ef0b7240876ddb885e6b44f2e/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:479ee41399fcddc46159a551705b89c05f11e8b8cb8e968f7fec64f62d91985e", size = 41135424, upload-time = "2025-07-18T00:55:11.461Z" }, - { url = "https://files.pythonhosted.org/packages/74/dc/035d54638fc5d2971cbf1e987ccd45f1091c83bcf747281cf6cc25e72c88/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:40ebfcb54a4f11bcde86bc586cbd0272bac0d516cfa539c799c2453768477569", size = 42823810, upload-time = "2025-07-18T00:55:16.301Z" }, - { url = "https://files.pythonhosted.org/packages/2e/3b/89fced102448a9e3e0d4dded1f37fa3ce4700f02cdb8665457fcc8015f5b/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8d58d8497814274d3d20214fbb24abcad2f7e351474357d552a8d53bce70c70e", size = 43391538, upload-time = "2025-07-18T00:55:23.82Z" }, - { url = "https://files.pythonhosted.org/packages/fb/bb/ea7f1bd08978d39debd3b23611c293f64a642557e8141c80635d501e6d53/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:585e7224f21124dd57836b1530ac8f2df2afc43c861d7bf3d58a4870c42ae36c", size = 45120056, upload-time = "2025-07-18T00:55:28.231Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0b/77ea0600009842b30ceebc3337639a7380cd946061b620ac1a2f3cb541e2/pyarrow-21.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:555ca6935b2cbca2c0e932bedd853e9bc523098c39636de9ad4693b5b1df86d6", size = 26220568, upload-time = "2025-07-18T00:55:32.122Z" }, - { url = "https://files.pythonhosted.org/packages/ca/d4/d4f817b21aacc30195cf6a46ba041dd1be827efa4a623cc8bf39a1c2a0c0/pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3a302f0e0963db37e0a24a70c56cf91a4faa0bca51c23812279ca2e23481fccd", size = 31160305, upload-time = "2025-07-18T00:55:35.373Z" }, - { url = "https://files.pythonhosted.org/packages/a2/9c/dcd38ce6e4b4d9a19e1d36914cb8e2b1da4e6003dd075474c4cfcdfe0601/pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:b6b27cf01e243871390474a211a7922bfbe3bda21e39bc9160daf0da3fe48876", size = 32684264, upload-time = "2025-07-18T00:55:39.303Z" }, - { url = "https://files.pythonhosted.org/packages/4f/74/2a2d9f8d7a59b639523454bec12dba35ae3d0a07d8ab529dc0809f74b23c/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e72a8ec6b868e258a2cd2672d91f2860ad532d590ce94cdf7d5e7ec674ccf03d", size = 41108099, upload-time = "2025-07-18T00:55:42.889Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/90/2660332eeb31303c13b653ea566a9918484b6e4d6b9d2d46879a33ab0622/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b7ae0bbdc8c6674259b25bef5d2a1d6af5d39d7200c819cf99e07f7dfef1c51e", size = 42829529, upload-time = "2025-07-18T00:55:47.069Z" }, - { url = "https://files.pythonhosted.org/packages/33/27/1a93a25c92717f6aa0fca06eb4700860577d016cd3ae51aad0e0488ac899/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:58c30a1729f82d201627c173d91bd431db88ea74dcaa3885855bc6203e433b82", size = 43367883, upload-time = "2025-07-18T00:55:53.069Z" }, - { url = "https://files.pythonhosted.org/packages/05/d9/4d09d919f35d599bc05c6950095e358c3e15148ead26292dfca1fb659b0c/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:072116f65604b822a7f22945a7a6e581cfa28e3454fdcc6939d4ff6090126623", size = 45133802, upload-time = "2025-07-18T00:55:57.714Z" }, - { url = "https://files.pythonhosted.org/packages/71/30/f3795b6e192c3ab881325ffe172e526499eb3780e306a15103a2764916a2/pyarrow-21.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf56ec8b0a5c8c9d7021d6fd754e688104f9ebebf1bf4449613c9531f5346a18", size = 26203175, upload-time = "2025-07-18T00:56:01.364Z" }, - { url = "https://files.pythonhosted.org/packages/16/ca/c7eaa8e62db8fb37ce942b1ea0c6d7abfe3786ca193957afa25e71b81b66/pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e99310a4ebd4479bcd1964dff9e14af33746300cb014aa4a3781738ac63baf4a", size = 31154306, upload-time = "2025-07-18T00:56:04.42Z" }, - { url = "https://files.pythonhosted.org/packages/ce/e8/e87d9e3b2489302b3a1aea709aaca4b781c5252fcb812a17ab6275a9a484/pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d2fe8e7f3ce329a71b7ddd7498b3cfac0eeb200c2789bd840234f0dc271a8efe", size = 32680622, upload-time = "2025-07-18T00:56:07.505Z" }, - { url = "https://files.pythonhosted.org/packages/84/52/79095d73a742aa0aba370c7942b1b655f598069489ab387fe47261a849e1/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f522e5709379d72fb3da7785aa489ff0bb87448a9dc5a75f45763a795a089ebd", size = 41104094, upload-time = "2025-07-18T00:56:10.994Z" }, - { url = "https://files.pythonhosted.org/packages/89/4b/7782438b551dbb0468892a276b8c789b8bbdb25ea5c5eb27faadd753e037/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:69cbbdf0631396e9925e048cfa5bce4e8c3d3b41562bbd70c685a8eb53a91e61", size = 42825576, upload-time = "2025-07-18T00:56:15.569Z" }, - { url = "https://files.pythonhosted.org/packages/b3/62/0f29de6e0a1e33518dec92c65be0351d32d7ca351e51ec5f4f837a9aab91/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:731c7022587006b755d0bdb27626a1a3bb004bb56b11fb30d98b6c1b4718579d", size = 43368342, upload-time = "2025-07-18T00:56:19.531Z" }, - { url = "https://files.pythonhosted.org/packages/90/c7/0fa1f3f29cf75f339768cc698c8ad4ddd2481c1742e9741459911c9ac477/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc56bc708f2d8ac71bd1dcb927e458c93cec10b98eb4120206a4091db7b67b99", size = 45131218, upload-time = "2025-07-18T00:56:23.347Z" }, - { url = "https://files.pythonhosted.org/packages/01/63/581f2076465e67b23bc5a37d4a2abff8362d389d29d8105832e82c9c811c/pyarrow-21.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:186aa00bca62139f75b7de8420f745f2af12941595bbbfa7ed3870ff63e25636", size = 26087551, upload-time = "2025-07-18T00:56:26.758Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/ab/357d0d9648bb8241ee7348e564f2479d206ebe6e1c47ac5027c2e31ecd39/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:a7a102574faa3f421141a64c10216e078df467ab9576684d5cd696952546e2da", size = 31290064, upload-time = "2025-07-18T00:56:30.214Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8a/5685d62a990e4cac2043fc76b4661bf38d06efed55cf45a334b455bd2759/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:1e005378c4a2c6db3ada3ad4c217b381f6c886f0a80d6a316fe586b90f77efd7", size = 32727837, upload-time = "2025-07-18T00:56:33.935Z" }, - { url = "https://files.pythonhosted.org/packages/fc/de/c0828ee09525c2bafefd3e736a248ebe764d07d0fd762d4f0929dbc516c9/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:65f8e85f79031449ec8706b74504a316805217b35b6099155dd7e227eef0d4b6", size = 41014158, upload-time = "2025-07-18T00:56:37.528Z" }, - { url = "https://files.pythonhosted.org/packages/6e/26/a2865c420c50b7a3748320b614f3484bfcde8347b2639b2b903b21ce6a72/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3a81486adc665c7eb1a2bde0224cfca6ceaba344a82a971ef059678417880eb8", size = 42667885, upload-time = "2025-07-18T00:56:41.483Z" }, - { url = "https://files.pythonhosted.org/packages/0a/f9/4ee798dc902533159250fb4321267730bc0a107d8c6889e07c3add4fe3a5/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fc0d2f88b81dcf3ccf9a6ae17f89183762c8a94a5bdcfa09e05cfe413acf0503", size = 43276625, upload-time = "2025-07-18T00:56:48.002Z" }, - { url = "https://files.pythonhosted.org/packages/5a/da/e02544d6997037a4b0d22d8e5f66bc9315c3671371a8b18c79ade1cefe14/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6299449adf89df38537837487a4f8d3bd91ec94354fdd2a7d30bc11c48ef6e79", size = 44951890, upload-time = "2025-07-18T00:56:52.568Z" }, - { url = "https://files.pythonhosted.org/packages/e5/4e/519c1bc1876625fe6b71e9a28287c43ec2f20f73c658b9ae1d485c0c206e/pyarrow-21.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:222c39e2c70113543982c6b34f3077962b44fca38c0bd9e68bb6781534425c10", size = 26371006, upload-time = "2025-07-18T00:56:56.379Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9b/cb3f7e0a345353def531ca879053e9ef6b9f38ed91aebcf68b09ba54dec0/pyarrow-22.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:77718810bd3066158db1e95a63c160ad7ce08c6b0710bc656055033e39cdad88", size = 34223968, upload-time = "2025-10-24T10:03:31.21Z" }, + { url = "https://files.pythonhosted.org/packages/6c/41/3184b8192a120306270c5307f105b70320fdaa592c99843c5ef78aaefdcf/pyarrow-22.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:44d2d26cda26d18f7af7db71453b7b783788322d756e81730acb98f24eb90ace", size = 35942085, upload-time = "2025-10-24T10:03:38.146Z" }, + { url = "https://files.pythonhosted.org/packages/d9/3d/a1eab2f6f08001f9fb714b8ed5cfb045e2fe3e3e3c0c221f2c9ed1e6d67d/pyarrow-22.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b9d71701ce97c95480fecb0039ec5bb889e75f110da72005743451339262f4ce", size = 44964613, upload-time = "2025-10-24T10:03:46.516Z" }, + { url = "https://files.pythonhosted.org/packages/46/46/a1d9c24baf21cfd9ce994ac820a24608decf2710521b29223d4334985127/pyarrow-22.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:710624ab925dc2b05a6229d47f6f0dac1c1155e6ed559be7109f684eba048a48", size = 47627059, upload-time = "2025-10-24T10:03:55.353Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/4c/f711acb13075c1391fd54bc17e078587672c575f8de2a6e62509af026dcf/pyarrow-22.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f963ba8c3b0199f9d6b794c90ec77545e05eadc83973897a4523c9e8d84e9340", size = 47947043, upload-time = "2025-10-24T10:04:05.408Z" }, + { url = "https://files.pythonhosted.org/packages/4e/70/1f3180dd7c2eab35c2aca2b29ace6c519f827dcd4cfeb8e0dca41612cf7a/pyarrow-22.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd0d42297ace400d8febe55f13fdf46e86754842b860c978dfec16f081e5c653", size = 50206505, upload-time = "2025-10-24T10:04:15.786Z" }, + { url = "https://files.pythonhosted.org/packages/80/07/fea6578112c8c60ffde55883a571e4c4c6bc7049f119d6b09333b5cc6f73/pyarrow-22.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:00626d9dc0f5ef3a75fe63fd68b9c7c8302d2b5bbc7f74ecaedba83447a24f84", size = 28101641, upload-time = "2025-10-24T10:04:22.57Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b7/18f611a8cdc43417f9394a3ccd3eace2f32183c08b9eddc3d17681819f37/pyarrow-22.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:3e294c5eadfb93d78b0763e859a0c16d4051fc1c5231ae8956d61cb0b5666f5a", size = 34272022, upload-time = "2025-10-24T10:04:28.973Z" }, + { url = "https://files.pythonhosted.org/packages/26/5c/f259e2526c67eb4b9e511741b19870a02363a47a35edbebc55c3178db22d/pyarrow-22.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:69763ab2445f632d90b504a815a2a033f74332997052b721002298ed6de40f2e", size = 35995834, upload-time = "2025-10-24T10:04:35.467Z" }, + { url = "https://files.pythonhosted.org/packages/50/8d/281f0f9b9376d4b7f146913b26fac0aa2829cd1ee7e997f53a27411bbb92/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b41f37cabfe2463232684de44bad753d6be08a7a072f6a83447eeaf0e4d2a215", size = 45030348, upload-time = "2025-10-24T10:04:43.366Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e5/53c0a1c428f0976bf22f513d79c73000926cb00b9c138d8e02daf2102e18/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35ad0f0378c9359b3f297299c3309778bb03b8612f987399a0333a560b43862d", size = 47699480, upload-time = "2025-10-24T10:04:51.486Z" }, + { url = "https://files.pythonhosted.org/packages/95/e1/9dbe4c465c3365959d183e6345d0a8d1dc5b02ca3f8db4760b3bc834cf25/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8382ad21458075c2e66a82a29d650f963ce51c7708c7c0ff313a8c206c4fd5e8", size = 48011148, upload-time = "2025-10-24T10:04:59.585Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b4/7caf5d21930061444c3cf4fa7535c82faf5263e22ce43af7c2759ceb5b8b/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a812a5b727bc09c3d7ea072c4eebf657c2f7066155506ba31ebf4792f88f016", size = 50276964, upload-time = "2025-10-24T10:05:08.175Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f3/cec89bd99fa3abf826f14d4e53d3d11340ce6f6af4d14bdcd54cd83b6576/pyarrow-22.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec5d40dd494882704fb876c16fa7261a69791e784ae34e6b5992e977bd2e238c", size = 28106517, upload-time = "2025-10-24T10:05:14.314Z" }, + { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578, upload-time = "2025-10-24T10:05:21.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906, upload-time = "2025-10-24T10:05:29.485Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677, upload-time = "2025-10-24T10:05:38.274Z" }, + { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315, upload-time = "2025-10-24T10:05:47.314Z" }, + { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906, upload-time = "2025-10-24T10:05:58.254Z" }, + { url = "https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783, upload-time = "2025-10-24T10:06:08.08Z" }, + { url = "https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883, upload-time = "2025-10-24T10:06:14.204Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d6/d0fac16a2963002fc22c8fa75180a838737203d558f0ed3b564c4a54eef5/pyarrow-22.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e6e95176209257803a8b3d0394f21604e796dadb643d2f7ca21b66c9c0b30c9a", size = 34204629, upload-time = "2025-10-24T10:06:20.274Z" }, + { url = "https://files.pythonhosted.org/packages/c6/9c/1d6357347fbae062ad3f17082f9ebc29cc733321e892c0d2085f42a2212b/pyarrow-22.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:001ea83a58024818826a9e3f89bf9310a114f7e26dfe404a4c32686f97bd7901", size = 35985783, upload-time = "2025-10-24T10:06:27.301Z" }, + { url = "https://files.pythonhosted.org/packages/ff/c0/782344c2ce58afbea010150df07e3a2f5fdad299cd631697ae7bd3bac6e3/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ce20fe000754f477c8a9125543f1936ea5b8867c5406757c224d745ed033e691", size = 45020999, upload-time = "2025-10-24T10:06:35.387Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8b/5362443737a5307a7b67c1017c42cd104213189b4970bf607e05faf9c525/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e0a15757fccb38c410947df156f9749ae4a3c89b2393741a50521f39a8cf202a", size = 47724601, upload-time = "2025-10-24T10:06:43.551Z" }, + { url = "https://files.pythonhosted.org/packages/69/4d/76e567a4fc2e190ee6072967cb4672b7d9249ac59ae65af2d7e3047afa3b/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cedb9dd9358e4ea1d9bce3665ce0797f6adf97ff142c8e25b46ba9cdd508e9b6", size = 48001050, upload-time = "2025-10-24T10:06:52.284Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/5e/5653f0535d2a1aef8223cee9d92944cb6bccfee5cf1cd3f462d7cb022790/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:252be4a05f9d9185bb8c18e83764ebcfea7185076c07a7a662253af3a8c07941", size = 50307877, upload-time = "2025-10-24T10:07:02.405Z" }, + { url = "https://files.pythonhosted.org/packages/2d/f8/1d0bd75bf9328a3b826e24a16e5517cd7f9fbf8d34a3184a4566ef5a7f29/pyarrow-22.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:a4893d31e5ef780b6edcaf63122df0f8d321088bb0dee4c8c06eccb1ca28d145", size = 27977099, upload-time = "2025-10-24T10:08:07.259Z" }, + { url = "https://files.pythonhosted.org/packages/90/81/db56870c997805bf2b0f6eeeb2d68458bf4654652dccdcf1bf7a42d80903/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:f7fe3dbe871294ba70d789be16b6e7e52b418311e166e0e3cba9522f0f437fb1", size = 34336685, upload-time = "2025-10-24T10:07:11.47Z" }, + { url = "https://files.pythonhosted.org/packages/1c/98/0727947f199aba8a120f47dfc229eeb05df15bcd7a6f1b669e9f882afc58/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ba95112d15fd4f1105fb2402c4eab9068f0554435e9b7085924bcfaac2cc306f", size = 36032158, upload-time = "2025-10-24T10:07:18.626Z" }, + { url = "https://files.pythonhosted.org/packages/96/b4/9babdef9c01720a0785945c7cf550e4acd0ebcd7bdd2e6f0aa7981fa85e2/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c064e28361c05d72eed8e744c9605cbd6d2bb7481a511c74071fd9b24bc65d7d", size = 44892060, upload-time = "2025-10-24T10:07:26.002Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ca/2f8804edd6279f78a37062d813de3f16f29183874447ef6d1aadbb4efa0f/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6f9762274496c244d951c819348afbcf212714902742225f649cf02823a6a10f", size = 47504395, upload-time = "2025-10-24T10:07:34.09Z" }, + { url = "https://files.pythonhosted.org/packages/b9/f0/77aa5198fd3943682b2e4faaf179a674f0edea0d55d326d83cb2277d9363/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a9d9ffdc2ab696f6b15b4d1f7cec6658e1d788124418cb30030afbae31c64746", size = 48066216, upload-time = "2025-10-24T10:07:43.528Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/a1937b6e78b2aff18b706d738c9e46ade5bfcf11b294e39c87706a0089ac/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ec1a15968a9d80da01e1d30349b2b0d7cc91e96588ee324ce1b5228175043e95", size = 50288552, upload-time = "2025-10-24T10:07:53.519Z" }, + { url = "https://files.pythonhosted.org/packages/60/ae/b5a5811e11f25788ccfdaa8f26b6791c9807119dffcf80514505527c384c/pyarrow-22.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bba208d9c7decf9961998edf5c65e3ea4355d5818dd6cd0f6809bec1afb951cc", size = 28262504, upload-time = "2025-10-24T10:08:00.932Z" }, + { url = "https://files.pythonhosted.org/packages/bd/b0/0fa4d28a8edb42b0a7144edd20befd04173ac79819547216f8a9f36f9e50/pyarrow-22.0.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:9bddc2cade6561f6820d4cd73f99a0243532ad506bc510a75a5a65a522b2d74d", size = 34224062, upload-time = "2025-10-24T10:08:14.101Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a8/7a719076b3c1be0acef56a07220c586f25cd24de0e3f3102b438d18ae5df/pyarrow-22.0.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:e70ff90c64419709d38c8932ea9fe1cc98415c4f87ea8da81719e43f02534bc9", size = 35990057, upload-time = "2025-10-24T10:08:21.842Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/3c/359ed54c93b47fb6fe30ed16cdf50e3f0e8b9ccfb11b86218c3619ae50a8/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:92843c305330aa94a36e706c16209cd4df274693e777ca47112617db7d0ef3d7", size = 45068002, upload-time = "2025-10-24T10:08:29.034Z" }, + { url = "https://files.pythonhosted.org/packages/55/fc/4945896cc8638536ee787a3bd6ce7cec8ec9acf452d78ec39ab328efa0a1/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:6dda1ddac033d27421c20d7a7943eec60be44e0db4e079f33cc5af3b8280ccde", size = 47737765, upload-time = "2025-10-24T10:08:38.559Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5e/7cb7edeb2abfaa1f79b5d5eb89432356155c8426f75d3753cbcb9592c0fd/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:84378110dd9a6c06323b41b56e129c504d157d1a983ce8f5443761eb5256bafc", size = 48048139, upload-time = "2025-10-24T10:08:46.784Z" }, + { url = "https://files.pythonhosted.org/packages/88/c6/546baa7c48185f5e9d6e59277c4b19f30f48c94d9dd938c2a80d4d6b067c/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:854794239111d2b88b40b6ef92aa478024d1e5074f364033e73e21e3f76b25e0", size = 50314244, upload-time = "2025-10-24T10:08:55.771Z" }, + { url = "https://files.pythonhosted.org/packages/3c/79/755ff2d145aafec8d347bf18f95e4e81c00127f06d080135dfc86aea417c/pyarrow-22.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:b883fe6fd85adad7932b3271c38ac289c65b7337c2c132e9569f9d3940620730", size = 28757501, upload-time = "2025-10-24T10:09:59.891Z" }, + { url = "https://files.pythonhosted.org/packages/0e/d2/237d75ac28ced3147912954e3c1a174df43a95f4f88e467809118a8165e0/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7a820d8ae11facf32585507c11f04e3f38343c1e784c9b5a8b1da5c930547fe2", size = 34355506, upload-time = "2025-10-24T10:09:02.953Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/733dfffe6d3069740f98e57ff81007809067d68626c5faef293434d11bd6/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_x86_64.whl", hash = "sha256:c6ec3675d98915bf1ec8b3c7986422682f7232ea76cad276f4c8abd5b7319b70", size = 36047312, upload-time = "2025-10-24T10:09:10.334Z" }, + { url = "https://files.pythonhosted.org/packages/7c/2b/29d6e3782dc1f299727462c1543af357a0f2c1d3c160ce199950d9ca51eb/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3e739edd001b04f654b166204fc7a9de896cf6007eaff33409ee9e50ceaff754", size = 45081609, upload-time = "2025-10-24T10:09:18.61Z" }, + { url = "https://files.pythonhosted.org/packages/8d/42/aa9355ecc05997915af1b7b947a7f66c02dcaa927f3203b87871c114ba10/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7388ac685cab5b279a41dfe0a6ccd99e4dbf322edfb63e02fc0443bf24134e91", size = 47703663, upload-time = "2025-10-24T10:09:27.369Z" }, + { url = "https://files.pythonhosted.org/packages/ee/62/45abedde480168e83a1de005b7b7043fd553321c1e8c5a9a114425f64842/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f633074f36dbc33d5c05b5dc75371e5660f1dbf9c8b1d95669def05e5425989c", size = 48066543, upload-time = "2025-10-24T10:09:34.908Z" }, + { url = "https://files.pythonhosted.org/packages/84/e9/7878940a5b072e4f3bf998770acafeae13b267f9893af5f6d4ab3904b67e/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4c19236ae2402a8663a2c8f21f1870a03cc57f0bef7e4b6eb3238cc82944de80", size = 50288838, upload-time = "2025-10-24T10:09:44.394Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/03/f335d6c52b4a4761bcc83499789a1e2e16d9d201a58c327a9b5cc9a41bd9/pyarrow-22.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0c34fe18094686194f204a3b1787a27456897d8a2d62caf84b61e8dfbc0252ae", size = 29185594, upload-time = "2025-10-24T10:09:53.111Z" }, ] [[package]] @@ -1260,7 +1345,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1268,96 +1353,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, 
- { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, 
upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = 
"sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { 
url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", 
size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = 
"2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = 
"2025-10-14T10:21:00.006Z" }, + { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, 
upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = "https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, ] [[package]] diff --git a/libs/partners/ollama/README.md b/libs/partners/ollama/README.md index 4359ebd834c..627611c875f 100644 --- a/libs/partners/ollama/README.md +++ b/libs/partners/ollama/README.md @@ -1,7 +1,24 @@ # langchain-ollama +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-ollama?label=%20)](https://pypi.org/project/langchain-ollama/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-ollama)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-ollama)](https://pypistats.org/packages/langchain-ollama) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-ollama +``` + +## πŸ€” What is this? + This package contains the LangChain integration with Ollama +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/ollama) for more details. ## Development diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py index 2e63b51d310..21567e73eca 100644 --- a/libs/partners/ollama/langchain_ollama/chat_models.py +++ b/libs/partners/ollama/langchain_ollama/chat_models.py @@ -294,7 +294,7 @@ class ChatOllama(BaseChatModel): ```python from langchain_ollama import ChatOllama - llm = ChatOllama( + model = ChatOllama( model="gpt-oss:20b", validate_model_on_init=True, temperature=0.8, @@ -309,7 +309,7 @@ class ChatOllama(BaseChatModel): ("system", "You are a helpful translator. 
Translate the user sentence to French."), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` ```python @@ -318,7 +318,7 @@ class ChatOllama(BaseChatModel): Stream: ```python - for chunk in llm.stream("Return the words Hello World!"): + for chunk in model.stream("Return the words Hello World!"): print(chunk.text, end="") ``` @@ -331,7 +331,7 @@ class ChatOllama(BaseChatModel): ``` ```python - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -359,7 +359,7 @@ class ChatOllama(BaseChatModel): Async: ```python - await llm.ainvoke("Hello how are you!") + await model.ainvoke("Hello how are you!") ``` ```python @@ -383,7 +383,7 @@ class ChatOllama(BaseChatModel): ``` ```python - async for chunk in llm.astream("Say hello world!"): + async for chunk in model.astream("Say hello world!"): print(chunk.content) ``` @@ -396,7 +396,7 @@ class ChatOllama(BaseChatModel): ```python messages = [("human", "Say hello world!"), ("human", "Say goodbye world!")] - await llm.abatch(messages) + await model.abatch(messages) ``` ```python @@ -440,8 +440,8 @@ class ChatOllama(BaseChatModel): JSON mode: ```python - json_llm = ChatOllama(format="json") - llm.invoke( + json_model = ChatOllama(format="json") + json_model.invoke( "Return a query for the weather in a random location and time of day with two keys: location and time_of_day. " "Respond using JSON only." ).content @@ -495,18 +495,18 @@ class ChatOllama(BaseChatModel): ```python from langchain_ollama import ChatOllama - llm = ChatOllama( + model = ChatOllama( model="deepseek-r1:8b", validate_model_on_init=True, reasoning=True, ) - llm.invoke("how many r in the word strawberry?") + model.invoke("how many r in the word strawberry?") # or, on an invocation basis: - llm.invoke("how many r in the word strawberry?", reasoning=True) - # or llm.stream("how many r in the word strawberry?", reasoning=True) + model.invoke("how many r in the word strawberry?", reasoning=True) + # or model.stream("how many r in the word strawberry?", reasoning=True) # If not provided, the invocation will default to the ChatOllama reasoning # param provided (None by default). @@ -543,7 +543,7 @@ class ChatOllama(BaseChatModel): validate_model_on_init: bool = False """Whether to validate the model exists in Ollama locally on initialization. - !!! version-added "Added in version 0.3.4" + !!! version-added "Added in `langchain-ollama` 0.3.4" """ mirostat: int | None = None @@ -1258,17 +1258,19 @@ class ChatOllama(BaseChatModel): Args: schema: The output schema. Can be passed in as: - - a Pydantic class, - - a JSON schema - - a `TypedDict` class - - an OpenAI function/tool schema. + - An OpenAI function/tool schema. + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. 
method: The method for steering model generation, one of: @@ -1282,30 +1284,39 @@ class ChatOllama(BaseChatModel): desired schema into the model call. include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a `BaseMessage`) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. kwargs: Additional keyword args aren't supported. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs an instance of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is False then Runnable outputs a dict. + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - If `include_raw` is True, then Runnable outputs a dict with keys: + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` - - `'raw'`: `BaseMessage` - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None - - !!! warning "Behavior changed in 0.2.2" + !!! warning "Behavior changed in `langchain-ollama` 0.2.2" Added support for structured output API via `format` parameter. - !!! warning "Behavior changed in 0.3.0" + !!! warning "Behavior changed in `langchain-ollama` 0.3.0" Updated default `method` to `'json_schema'`. ??? 
note "Example: `schema=Pydantic` class, `method='json_schema'`, `include_raw=False`" @@ -1327,10 +1338,10 @@ class ChatOllama(BaseChatModel): ) - llm = ChatOllama(model="llama3.1", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatOllama(model="llama3.1", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") # -> AnswerWithJustification( # answer='They weigh the same', @@ -1352,13 +1363,13 @@ class ChatOllama(BaseChatModel): justification: str - llm = ChatOllama(model="llama3.1", temperature=0) - structured_llm = llm.with_structured_output( + model = ChatOllama(model="llama3.1", temperature=0) + structured_model = model.with_structured_output( AnswerWithJustification, include_raw=True, ) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") # -> { # 'raw': AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_Ao02pnFYXD6GN1yzc0uXPsvF', 'function': {'arguments': '{"answer":"They weigh the same.","justification":"Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ."}', 'name': 'AnswerWithJustification'}, 'type': 'function'}]}), # 'parsed': AnswerWithJustification(answer='They weigh the same.', justification='Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume or density of the objects may differ.'), @@ -1385,13 +1396,13 @@ class ChatOllama(BaseChatModel): ) - llm = ChatOllama(model="llama3.1", temperature=0) - structured_llm = llm.with_structured_output( + model = ChatOllama(model="llama3.1", temperature=0) + structured_model = model.with_structured_output( AnswerWithJustification, method="function_calling", ) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") # -> AnswerWithJustification( # answer='They weigh the same', @@ -1414,10 +1425,10 @@ class ChatOllama(BaseChatModel): justification: Annotated[str | None, None, "A justification for the answer."] - llm = ChatOllama(model="llama3.1", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatOllama(model="llama3.1", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke("What weighs more a pound of bricks or a pound of feathers") + structured_model.invoke("What weighs more a pound of bricks or a pound of feathers") # -> { # 'answer': 'They weigh the same', # 'justification': 'Both a pound of bricks and a pound of feathers weigh one pound. The weight is the same, but the volume and density of the two substances differ.' 
@@ -1441,10 +1452,10 @@ class ChatOllama(BaseChatModel): 'required': ['answer'] } - llm = ChatOllama(model="llama3.1", temperature=0) - structured_llm = llm.with_structured_output(oai_schema) + model = ChatOllama(model="llama3.1", temperature=0) + structured_model = model.with_structured_output(oai_schema) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) # -> { @@ -1465,12 +1476,12 @@ class ChatOllama(BaseChatModel): justification: str - llm = ChatOllama(model="llama3.1", temperature=0) - structured_llm = llm.with_structured_output( + model = ChatOllama(model="llama3.1", temperature=0) + structured_model = model.with_structured_output( AnswerWithJustification, method="json_mode", include_raw=True ) - structured_llm.invoke( + structured_model.invoke( "Answer the following question. " "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" "What's heavier a pound of bricks or a pound of feathers?" diff --git a/libs/partners/ollama/langchain_ollama/embeddings.py b/libs/partners/ollama/langchain_ollama/embeddings.py index 9d2d85eebea..675211e4ff7 100644 --- a/libs/partners/ollama/langchain_ollama/embeddings.py +++ b/libs/partners/ollama/langchain_ollama/embeddings.py @@ -123,7 +123,7 @@ class OllamaEmbeddings(BaseModel, Embeddings): validate_model_on_init: bool = False """Whether to validate the model exists in ollama locally on initialization. - !!! version-added "Added in version 0.3.4" + !!! version-added "Added in `langchain-ollama` 0.3.4" """ diff --git a/libs/partners/ollama/langchain_ollama/llms.py b/libs/partners/ollama/langchain_ollama/llms.py index 45a0a79fece..d840eff45cb 100644 --- a/libs/partners/ollama/langchain_ollama/llms.py +++ b/libs/partners/ollama/langchain_ollama/llms.py @@ -53,11 +53,11 @@ class OllamaLLM(BaseLLM): Random number seed for generation reproducibility. Key init args β€” client params: - base_url: str | None + base_url: Base URL where Ollama server is hosted. - keep_alive: int | str | None + keep_alive: How long the model stays loaded into memory. - format: Literal["", "json"] + format: Specify the format of the output. See full list of supported init args and their descriptions in the params section. @@ -66,7 +66,7 @@ class OllamaLLM(BaseLLM): ```python from langchain_ollama import OllamaLLM - llm = OllamaLLM( + model = OllamaLLM( model="llama3.1", temperature=0.7, num_predict=256, @@ -78,7 +78,7 @@ class OllamaLLM(BaseLLM): Invoke: ```python input_text = "The meaning of life is " - response = llm.invoke(input_text) + response = model.invoke(input_text) print(response) ``` ```txt @@ -88,7 +88,7 @@ class OllamaLLM(BaseLLM): Stream: ```python - for chunk in llm.stream(input_text): + for chunk in model.stream(input_text): print(chunk, end="") ``` ```txt @@ -98,10 +98,10 @@ class OllamaLLM(BaseLLM): Async: ```python - response = await llm.ainvoke(input_text) + response = await model.ainvoke(input_text) # stream: - # async for chunk in llm.astream(input_text): + # async for chunk in model.astream(input_text): # print(chunk, end="") ``` """ @@ -126,7 +126,7 @@ class OllamaLLM(BaseLLM): validate_model_on_init: bool = False """Whether to validate the model exists in ollama locally on initialization. - !!! version-added "Added in version 0.3.4" + !!! 
version-added "Added in `langchain-ollama` 0.3.4" """ mirostat: int | None = None diff --git a/libs/partners/ollama/pyproject.toml b/libs/partners/ollama/pyproject.toml index 63306aa0421..94af7cb89c4 100644 --- a/libs/partners/ollama/pyproject.toml +++ b/libs/partners/ollama/pyproject.toml @@ -3,26 +3,27 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-ollama" +description = "An integration package connecting Ollama and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.0" requires-python = ">=3.10.0,<4.0.0" dependencies = [ "ollama>=0.6.0,<1.0.0", - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", ] -name = "langchain-ollama" -version = "1.0.0a1" -description = "An integration package connecting Ollama and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/ollama" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/ollama" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-ollama%22" -docs = "https://reference.langchain.com/python/integrations/langchain_ollama/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/ollama" +Documentation = "https://reference.langchain.com/python/integrations/langchain_ollama/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/ollama" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-ollama%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ diff --git a/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models.py b/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models.py index e68f2e08555..a38bdbfad9f 100644 --- a/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models.py +++ b/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models.py @@ -40,7 +40,7 @@ def test_init_model_not_found(mock_list: MagicMock) -> None: @patch("langchain_ollama.chat_models.Client.list") def test_init_connection_error(mock_list: MagicMock) -> None: - """Test that a ValidationError is raised on connect failure during init.""" + """Test that a `ValidationError` is raised on connect failure during init.""" mock_list.side_effect = ConnectError("Test connection error") with pytest.raises(ValidationError) as excinfo: diff --git a/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models_reasoning.py b/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models_reasoning.py index 6ea05a626fa..33d1fe14338 100644 --- a/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models_reasoning.py +++ b/libs/partners/ollama/tests/integration_tests/chat_models/test_chat_models_reasoning.py @@ -71,11 +71,8 @@ async def test_stream_reasoning_none(model: str, use_async: bool) -> None: result += chunk assert isinstance(result, AIMessageChunk) assert result.content - assert "" in result.content - assert "" in result.content + # reasoning_content is only captured when reasoning=True assert "reasoning_content" not in result.additional_kwargs - assert "" not in result.additional_kwargs.get("reasoning_content", "") - 
assert "" not in result.additional_kwargs.get("reasoning_content", "") @pytest.mark.parametrize("model", [REASONING_MODEL_NAME]) @@ -149,11 +146,8 @@ async def test_invoke_reasoning_none(model: str, use_async: bool) -> None: else: result = llm.invoke([message]) assert result.content + # reasoning_content is only captured when reasoning=True assert "reasoning_content" not in result.additional_kwargs - assert "" in result.content - assert "" in result.content - assert "" not in result.additional_kwargs.get("reasoning_content", "") - assert "" not in result.additional_kwargs.get("reasoning_content", "") @pytest.mark.parametrize("model", [REASONING_MODEL_NAME]) @@ -184,40 +178,49 @@ async def test_reasoning_invoke(model: str, use_async: bool) -> None: @pytest.mark.parametrize("model", [REASONING_MODEL_NAME]) -def test_think_tag_stripping_necessity(model: str) -> None: - """Test that demonstrates why `_strip_think_tags` is necessary. +def test_reasoning_modes_behavior(model: str) -> None: + """Test the behavior differences between reasoning modes. - DeepSeek R1 models include reasoning/thinking as their default behavior. - When `reasoning=False` is set, the user explicitly wants no reasoning content, - but Ollama cannot disable thinking at the API level for these models. - Therefore, post-processing is required to strip the `` tags. + This test documents how the Ollama API and LangChain handle reasoning content + for DeepSeek R1 models across different reasoning settings. - This test documents the specific behavior that necessitates the - `_strip_think_tags` function in the chat_models.py implementation. + Current Ollama API behavior: + - Ollama automatically separates reasoning content into a 'thinking' field + - No tags are present in responses + - `think=False` prevents the 'thinking' field from being included + - `think=None` includes the 'thinking' field (model default) + - `think=True` explicitly requests the 'thinking' field + + LangChain behavior: + - `reasoning=False`: Does not capture reasoning content + - `reasoning=None`: Does not capture reasoning content (model default behavior) + - `reasoning=True`: Captures reasoning in `additional_kwargs['reasoning_content']` """ - # Test with reasoning=None (default behavior - should include think tags) - llm_default = ChatOllama(model=model, reasoning=None, num_ctx=2**12) message = HumanMessage(content=SAMPLE) + # Test with reasoning=None (model default - no reasoning captured) + llm_default = ChatOllama(model=model, reasoning=None, num_ctx=2**12) result_default = llm_default.invoke([message]) - - # With reasoning=None, the model's default behavior includes tags - # This demonstrates why we need the stripping logic - assert "" in result_default.content - assert "" in result_default.content + assert result_default.content + assert "" not in result_default.content + assert "" not in result_default.content assert "reasoning_content" not in result_default.additional_kwargs - # Test with reasoning=False (explicit disable - should NOT include think tags) + # Test with reasoning=False (explicit disable - no reasoning captured) llm_disabled = ChatOllama(model=model, reasoning=False, num_ctx=2**12) - result_disabled = llm_disabled.invoke([message]) - - # With reasoning=False, think tags should be stripped from content - # This verifies that _strip_think_tags is working correctly + assert result_disabled.content assert "" not in result_disabled.content assert "" not in result_disabled.content assert "reasoning_content" not in 
diff --git a/libs/partners/ollama/uv.lock b/libs/partners/ollama/uv.lock
index ec872991427..06ad39d6905 100644
--- a/libs/partners/ollama/uv.lock
+++ b/libs/partners/ollama/uv.lock
@@ -203,7 +203,7 @@ name = "exceptiongroup"
 version = "1.3.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+    { name = "typing-extensions", marker = "python_full_version < '3.11'" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" }
 wheels = [
@@ -288,7 +288,7 @@ wheels = [
 [[package]]
 name = "langchain-core"
-version = "1.0.0a6"
+version = "1.0.0"
 source = { editable = "../../core" }
 dependencies = [
     { name = "jsonpatch" },
@@ -346,7 +346,7 @@ typing = [
 [[package]]
 name = "langchain-ollama"
-version = "1.0.0a1"
+version = "1.0.0"
 source = { editable = "." 
} dependencies = [ { name = "langchain-core" }, @@ -400,7 +400,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1017,7 +1017,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1025,96 +1025,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = 
"2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = 
"sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { 
url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 
2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { 
url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = 
"sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = 
"2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/partners/openai/README.md b/libs/partners/openai/README.md index 6a771659f09..2cb2c569b9b 100644 --- a/libs/partners/openai/README.md +++ b/libs/partners/openai/README.md @@ -1,5 +1,22 @@ # langchain-openai +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-openai?label=%20)](https://pypi.org/project/langchain-openai/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-openai)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-openai)](https://pypistats.org/packages/langchain-openai) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-openai +``` + +## πŸ€” What is this? + This package contains the LangChain integrations for OpenAI through their `openai` SDK. +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/openai) for more details. diff --git a/libs/partners/openai/langchain_openai/chat_models/_client_utils.py b/libs/partners/openai/langchain_openai/chat_models/_client_utils.py index 3eba5c7309b..4a0efce9e1b 100644 --- a/libs/partners/openai/langchain_openai/chat_models/_client_utils.py +++ b/libs/partners/openai/langchain_openai/chat_models/_client_utils.py @@ -9,11 +9,14 @@ Logic is largely replicated from openai._base_client. 
from __future__ import annotations import asyncio +import inspect import os +from collections.abc import Awaitable, Callable from functools import lru_cache -from typing import Any +from typing import Any, cast import openai +from pydantic import SecretStr class _SyncHttpxClientWrapper(openai.DefaultHttpxClient): @@ -107,3 +110,33 @@ def _get_default_async_httpx_client( return _build_async_httpx_client(base_url, timeout) else: return _cached_async_httpx_client(base_url, timeout) + + +def _resolve_sync_and_async_api_keys( + api_key: SecretStr | Callable[[], str] | Callable[[], Awaitable[str]], +) -> tuple[str | None | Callable[[], str], str | Callable[[], Awaitable[str]]]: + """Resolve sync and async API key values. + + Because OpenAI and AsyncOpenAI clients support either sync or async callables for + the API key, we need to resolve separate values here. + """ + if isinstance(api_key, SecretStr): + sync_api_key_value: str | None | Callable[[], str] = api_key.get_secret_value() + async_api_key_value: str | Callable[[], Awaitable[str]] = ( + api_key.get_secret_value() + ) + elif callable(api_key): + if inspect.iscoroutinefunction(api_key): + async_api_key_value = api_key + sync_api_key_value = None + else: + sync_api_key_value = cast(Callable, api_key) + + async def async_api_key_wrapper() -> str: + return await asyncio.get_event_loop().run_in_executor( + None, cast(Callable, api_key) + ) + + async_api_key_value = async_api_key_wrapper + + return sync_api_key_value, async_api_key_value diff --git a/libs/partners/openai/langchain_openai/chat_models/azure.py b/libs/partners/openai/langchain_openai/chat_models/azure.py index 602b91f6980..183e27fa681 100644 --- a/libs/partners/openai/langchain_openai/chat_models/azure.py +++ b/libs/partners/openai/langchain_openai/chat_models/azure.py @@ -49,30 +49,30 @@ class AzureChatOpenAI(BaseChatOpenAI): ``` Key init args β€” completion params: - azure_deployment: str + azure_deployment: Name of Azure OpenAI deployment to use. - temperature: float + temperature: Sampling temperature. - max_tokens: int | None + max_tokens: Max number of tokens to generate. - logprobs: bool | None + logprobs: Whether to return logprobs. Key init args β€” client params: - api_version: str + api_version: Azure OpenAI REST API version to use (distinct from the version of the underlying model). [See more on the different versions.](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning) - timeout: Union[float, Tuple[float, float], Any, None] + timeout: Timeout for requests. - max_retries: int | None + max_retries: Max number of retries. - organization: str | None + organization: OpenAI organization ID. If not passed in will be read from env var `OPENAI_ORG_ID`. - model: str | None + model: The name of the underlying OpenAI model. Used for tracing and token counting. Does not affect completion. E.g. `'gpt-4'`, `'gpt-35-turbo'`, etc. - model_version: str | None + model_version: The version of the underlying OpenAI model. Used for tracing and token counting. Does not affect completion. E.g., `'0125'`, `'0125-preview'`, etc. 
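Not part of the patch, but for orientation: a minimal sketch of how the new `_resolve_sync_and_async_api_keys` helper (added in `_client_utils.py` above) resolves each accepted `api_key` form into separate values for the sync and async OpenAI clients. The key string and the `get_key`/`aget_key` helpers below are placeholders, not anything defined by the diff.

```python
import asyncio

from pydantic import SecretStr

from langchain_openai.chat_models._client_utils import (
    _resolve_sync_and_async_api_keys,
)

# 1. SecretStr: both clients get the unwrapped string value.
sync_key, async_key = _resolve_sync_and_async_api_keys(SecretStr("sk-placeholder"))
print(sync_key, async_key)  # "sk-placeholder" "sk-placeholder"


# 2. Sync callable: passed through unchanged for the sync client, and wrapped in
#    an executor so the async client can await it.
def get_key() -> str:
    return "sk-placeholder"


sync_key, async_key = _resolve_sync_and_async_api_keys(get_key)
print(sync_key())                # "sk-placeholder"
print(asyncio.run(async_key()))  # "sk-placeholder"


# 3. Async callable: only the async client can use it; the sync side stays None,
#    so sync invocations on the model raise via _ensure_sync_client_available.
async def aget_key() -> str:
    return "sk-placeholder"


sync_key, async_key = _resolve_sync_and_async_api_keys(aget_key)
print(sync_key)                  # None
print(asyncio.run(async_key()))  # "sk-placeholder"
```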
@@ -82,7 +82,7 @@ class AzureChatOpenAI(BaseChatOpenAI): ```python from langchain_openai import AzureChatOpenAI - llm = AzureChatOpenAI( + model = AzureChatOpenAI( azure_deployment="your-deployment", api_version="2024-05-01-preview", temperature=0, @@ -127,7 +127,7 @@ class AzureChatOpenAI(BaseChatOpenAI): ), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` ```python @@ -172,7 +172,7 @@ class AzureChatOpenAI(BaseChatOpenAI): Stream: ```python - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.text, end="") ``` @@ -200,7 +200,7 @@ class AzureChatOpenAI(BaseChatOpenAI): ``` ```python - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -221,13 +221,13 @@ class AzureChatOpenAI(BaseChatOpenAI): Async: ```python - await llm.ainvoke(messages) + await model.ainvoke(messages) # stream: - # async for chunk in (await llm.astream(messages)) + # async for chunk in (await model.astream(messages)) # batch: - # await llm.abatch([messages]) + # await model.abatch([messages]) ``` Tool calling: @@ -251,8 +251,8 @@ class AzureChatOpenAI(BaseChatOpenAI): ) - llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) - ai_msg = llm_with_tools.invoke( + model_with_tools = model.bind_tools([GetWeather, GetPopulation]) + ai_msg = model_with_tools.invoke( "Which city is hotter today and which is bigger: LA or NY?" ) ai_msg.tool_calls @@ -300,8 +300,8 @@ class AzureChatOpenAI(BaseChatOpenAI): ) - structured_llm = llm.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats") + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats") ``` ```python @@ -316,8 +316,8 @@ class AzureChatOpenAI(BaseChatOpenAI): JSON mode: ```python - json_llm = llm.bind(response_format={"type": "json_object"}) - ai_msg = json_llm.invoke( + json_model = model.bind(response_format={"type": "json_object"}) + ai_msg = json_model.invoke( "Return a JSON object with key 'random_ints' and a value of 10 random ints in [0-99]" ) ai_msg.content @@ -344,7 +344,7 @@ class AzureChatOpenAI(BaseChatOpenAI): }, ] ) - ai_msg = llm.invoke([message]) + ai_msg = model.invoke([message]) ai_msg.content ``` @@ -354,7 +354,7 @@ class AzureChatOpenAI(BaseChatOpenAI): Token usage: ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.usage_metadata ``` @@ -363,8 +363,8 @@ class AzureChatOpenAI(BaseChatOpenAI): ``` Logprobs: ```python - logprobs_llm = llm.bind(logprobs=True) - ai_msg = logprobs_llm.invoke(messages) + logprobs_model = model.bind(logprobs=True) + ai_msg = logprobs_model.invoke(messages) ai_msg.response_metadata["logprobs"] ``` @@ -422,7 +422,7 @@ class AzureChatOpenAI(BaseChatOpenAI): Response metadata ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.response_metadata ``` @@ -572,7 +572,11 @@ class AzureChatOpenAI(BaseChatOpenAI): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object.""" + """Get the namespace of the LangChain object. + + Returns: + `["langchain", "chat_models", "azure_openai"]` + """ return ["langchain", "chat_models", "azure_openai"] @property @@ -833,17 +837,19 @@ class AzureChatOpenAI(BaseChatOpenAI): Args: schema: The output schema. Can be passed in as: - - a JSON Schema, - - a `TypedDict` class, - - or a Pydantic class, - - an OpenAI function/tool schema. 
+ - A JSON Schema, + - A `TypedDict` class, + - A Pydantic class, + - Or an OpenAI function/tool schema. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. method: The method for steering model generation, one of: @@ -863,12 +869,18 @@ class AzureChatOpenAI(BaseChatOpenAI): support which methods [here](https://platform.openai.com/docs/guides/structured-outputs/function-calling-vs-response-format). include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. strict: - True: @@ -911,16 +923,16 @@ class AzureChatOpenAI(BaseChatOpenAI): \"\"\"Get weather at a location.\"\"\" pass - llm = init_chat_model("openai:gpt-4o-mini") + model = init_chat_model("openai:gpt-4o-mini") - structured_llm = llm.with_structured_output( + structured_model = model.with_structured_output( ResponseSchema, tools=[get_weather], strict=True, include_raw=True, ) - structured_llm.invoke("What's the weather in Boston?") + structured_model.invoke("What's the weather in Boston?") ``` ```python @@ -934,31 +946,26 @@ class AzureChatOpenAI(BaseChatOpenAI): kwargs: Additional keyword args are passed through to the model. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is False then Runnable outputs a dict. + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - If `include_raw` is True, then Runnable outputs a dict with keys: + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. 
+ - `'parsing_error'`: `BaseException | None` - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None - - !!! warning "Behavior changed in 0.1.20" - Added support for TypedDict class `schema`. - - !!! warning "Behavior changed in 0.1.21" - Support for `strict` argument added. - Support for `method="json_schema"` added. - - !!! warning "Behavior changed in 0.3.0" + !!! warning "Behavior changed in `langchain-openai` 0.3.0" `method` default changed from "function_calling" to "json_schema". - !!! warning "Behavior changed in 0.3.12" + !!! warning "Behavior changed in `langchain-openai` 0.3.12" Support for `tools` added. - !!! warning "Behavior changed in 0.3.21" + !!! warning "Behavior changed in `langchain-openai` 0.3.21" Pass `kwargs` through to the model. ??? note "Example: `schema=Pydantic` class, `method='json_schema'`, `include_raw=False`, `strict=True`" @@ -986,10 +993,12 @@ class AzureChatOpenAI(BaseChatOpenAI): ) - llm = AzureChatOpenAI(azure_deployment="...", model="gpt-4o", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = AzureChatOpenAI( + azure_deployment="...", model="gpt-4o", temperature=0 + ) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) @@ -1017,12 +1026,14 @@ class AzureChatOpenAI(BaseChatOpenAI): ) - llm = AzureChatOpenAI(azure_deployment="...", model="gpt-4o", temperature=0) - structured_llm = llm.with_structured_output( + model = AzureChatOpenAI( + azure_deployment="...", model="gpt-4o", temperature=0 + ) + structured_model = model.with_structured_output( AnswerWithJustification, method="function_calling" ) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) @@ -1046,12 +1057,14 @@ class AzureChatOpenAI(BaseChatOpenAI): justification: str - llm = AzureChatOpenAI(azure_deployment="...", model="gpt-4o", temperature=0) - structured_llm = llm.with_structured_output( + model = AzureChatOpenAI( + azure_deployment="...", model="gpt-4o", temperature=0 + ) + structured_model = model.with_structured_output( AnswerWithJustification, include_raw=True ) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) # -> { @@ -1078,10 +1091,12 @@ class AzureChatOpenAI(BaseChatOpenAI): ] - llm = AzureChatOpenAI(azure_deployment="...", model="gpt-4o", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = AzureChatOpenAI( + azure_deployment="...", model="gpt-4o", temperature=0 + ) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) # -> { @@ -1107,14 +1122,14 @@ class AzureChatOpenAI(BaseChatOpenAI): 'required': ['answer'] } - llm = AzureChatOpenAI( + model = AzureChatOpenAI( azure_deployment="...", model="gpt-4o", temperature=0, ) - structured_llm = llm.with_structured_output(oai_schema) + structured_model = model.with_structured_output(oai_schema) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) # -> { @@ -1135,16 +1150,16 @@ class AzureChatOpenAI(BaseChatOpenAI): justification: str - llm = AzureChatOpenAI( + model = 
AzureChatOpenAI( azure_deployment="...", model="gpt-4o", temperature=0, ) - structured_llm = llm.with_structured_output( + structured_model = model.with_structured_output( AnswerWithJustification, method="json_mode", include_raw=True ) - structured_llm.invoke( + structured_model.invoke( "Answer the following question. " "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" "What's heavier a pound of bricks or a pound of feathers?" @@ -1159,11 +1174,11 @@ class AzureChatOpenAI(BaseChatOpenAI): ??? note "Example: `schema=None`, `method='json_mode'`, `include_raw=True`" ```python - structured_llm = llm.with_structured_output( + structured_model = model.with_structured_output( method="json_mode", include_raw=True ) - structured_llm.invoke( + structured_model.invoke( "Answer the following question. " "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" "What's heavier a pound of bricks or a pound of feathers?" diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index 8fffd34e6ae..c343bf50f25 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -10,7 +10,14 @@ import re import ssl import sys import warnings -from collections.abc import AsyncIterator, Callable, Iterator, Mapping, Sequence +from collections.abc import ( + AsyncIterator, + Awaitable, + Callable, + Iterator, + Mapping, + Sequence, +) from functools import partial from io import BytesIO from json import JSONDecodeError @@ -109,6 +116,7 @@ from typing_extensions import Self from langchain_openai.chat_models._client_utils import ( _get_default_async_httpx_client, _get_default_httpx_client, + _resolve_sync_and_async_api_keys, ) from langchain_openai.chat_models._compat import ( _convert_from_v1_to_chat_completions, @@ -455,32 +463,92 @@ _DictOrPydantic: TypeAlias = dict | _BM class BaseChatOpenAI(BaseChatModel): """Base wrapper around OpenAI large language models for chat.""" - client: Any = Field(default=None, exclude=True) #: :meta private: - async_client: Any = Field(default=None, exclude=True) #: :meta private: - root_client: Any = Field(default=None, exclude=True) #: :meta private: - root_async_client: Any = Field(default=None, exclude=True) #: :meta private: + client: Any = Field(default=None, exclude=True) + + async_client: Any = Field(default=None, exclude=True) + + root_client: Any = Field(default=None, exclude=True) + + root_async_client: Any = Field(default=None, exclude=True) + model_name: str = Field(default="gpt-3.5-turbo", alias="model") """Model name to use.""" + temperature: float | None = None """What sampling temperature to use.""" + model_kwargs: dict[str, Any] = Field(default_factory=dict) """Holds any model parameters valid for `create` call not explicitly specified.""" - openai_api_key: SecretStr | None = Field( + + openai_api_key: ( + SecretStr | None | Callable[[], str] | Callable[[], Awaitable[str]] + ) = Field( alias="api_key", default_factory=secret_from_env("OPENAI_API_KEY", default=None) ) + """API key to use. + + Can be inferred from the `OPENAI_API_KEY` environment variable, or specified as a + string, or sync or async callable that returns a string. + + ??? example "Specify with environment variable" + + ```bash + export OPENAI_API_KEY=... + ``` + ```python + from langchain_openai import ChatOpenAI + + model = ChatOpenAI(model="gpt-5-nano") + ``` + + ??? 
example "Specify with a string" + + ```python + from langchain_openai import ChatOpenAI + + model = ChatOpenAI(model="gpt-5-nano", api_key="...") + ``` + + ??? example "Specify with a sync callable" + ```python + from langchain_openai import ChatOpenAI + + def get_api_key() -> str: + # Custom logic to retrieve API key + return "..." + + model = ChatOpenAI(model="gpt-5-nano", api_key=get_api_key) + ``` + + ??? example "Specify with an async callable" + ```python + from langchain_openai import ChatOpenAI + + async def get_api_key() -> str: + # Custom async logic to retrieve API key + return "..." + + model = ChatOpenAI(model="gpt-5-nano", api_key=get_api_key) + ``` + """ + openai_api_base: str | None = Field(default=None, alias="base_url") """Base URL path for API requests, leave blank if not using a proxy or service emulator.""" # noqa: E501 + openai_organization: str | None = Field(default=None, alias="organization") """Automatically inferred from env var `OPENAI_ORG_ID` if not provided.""" + # to support explicit proxy for OpenAI openai_proxy: str | None = Field( default_factory=from_env("OPENAI_PROXY", default=None) ) + request_timeout: float | tuple[float, float] | Any | None = Field( default=None, alias="timeout" ) """Timeout for requests to OpenAI completion API. Can be float, `httpx.Timeout` or `None`.""" + stream_usage: bool | None = None """Whether to include usage metadata in streaming output. If enabled, an additional message chunk will be generated during the stream including usage metadata. @@ -489,35 +557,47 @@ class BaseChatOpenAI(BaseChatModel): initialized with a custom client, as many chat completions APIs do not support streaming token usage. - !!! version-added "Added in version 0.3.9" + !!! version-added "Added in `langchain-openai` 0.3.9" - !!! warning "Behavior changed in 0.3.35" + !!! warning "Behavior changed in `langchain-openai` 0.3.35" Enabled for default base URL and client. """ + max_retries: int | None = None """Maximum number of retries to make when generating.""" + presence_penalty: float | None = None """Penalizes repeated tokens.""" + frequency_penalty: float | None = None """Penalizes repeated tokens according to frequency.""" + seed: int | None = None """Seed for generation""" + logprobs: bool | None = None """Whether to return logprobs.""" + top_logprobs: int | None = None """Number of most likely tokens to return at each token position, each with an associated log probability. `logprobs` must be set to true if this parameter is used.""" + logit_bias: dict[int, int] | None = None """Modify the likelihood of specified tokens appearing in the completion.""" + streaming: bool = False """Whether to stream the results or not.""" + n: int | None = None """Number of chat completions to generate for each prompt.""" + top_p: float | None = None """Total probability mass of tokens to consider at each step.""" + max_tokens: int | None = Field(default=None) """Maximum number of tokens to generate.""" + reasoning_effort: str | None = None """Constrains effort on reasoning for reasoning models. For use with the Chat Completions API. @@ -528,6 +608,7 @@ class BaseChatOpenAI(BaseChatModel): `'high'`. Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response. """ + reasoning: dict[str, Any] | None = None """Reasoning parameters for reasoning models. For use with the Responses API. @@ -538,16 +619,18 @@ class BaseChatOpenAI(BaseChatModel): } ``` - !!! version-added "Added in version 0.3.24" + !!! 
version-added "Added in `langchain-openai` 0.3.24" """ + verbosity: str | None = None """Controls the verbosity level of responses for reasoning models. For use with the Responses API. Currently supported values are `'low'`, `'medium'`, and `'high'`. - !!! version-added "Added in version 0.3.28" + !!! version-added "Added in `langchain-openai` 0.3.28" """ + tiktoken_model_name: str | None = None """The model name to pass to tiktoken when using this class. Tiktoken is used to count the number of tokens in documents to constrain @@ -558,19 +641,25 @@ class BaseChatOpenAI(BaseChatModel): when using one of the many model providers that expose an OpenAI-like API but with different models. In those cases, in order to avoid erroring when tiktoken is called, you can specify a model name to use here.""" + default_headers: Mapping[str, str] | None = None + default_query: Mapping[str, object] | None = None + # Configure a custom httpx client. See the # [httpx documentation](https://www.python-httpx.org/api/#client) for more details. http_client: Any | None = Field(default=None, exclude=True) """Optional `httpx.Client`. Only used for sync invocations. Must specify `http_async_client` as well if you'd like a custom client for async invocations. """ + http_async_client: Any | None = Field(default=None, exclude=True) """Optional `httpx.AsyncClient`. Only used for async invocations. Must specify `http_client` as well if you'd like a custom client for sync invocations.""" + stop: list[str] | str | None = Field(default=None, alias="stop_sequences") """Default stop sequences.""" + extra_body: Mapping[str, Any] | None = None """Optional additional JSON properties to include in the request parameters when making requests to OpenAI compatible APIs, such as vLLM, LM Studio, or other @@ -593,6 +682,7 @@ class BaseChatOpenAI(BaseChatModel): include_response_headers: bool = False """Whether to include response headers in the output message `response_metadata`.""" + disabled_params: dict[str, Any] | None = Field(default=None) """Parameters of the OpenAI client or `chat.completions` endpoint that should be disabled for the given model. @@ -621,7 +711,7 @@ class BaseChatOpenAI(BaseChatModel): - `'reasoning.encrypted_content'` - `'code_interpreter_call.outputs'` - !!! version-added "Added in version 0.3.24" + !!! version-added "Added in `langchain-openai` 0.3.24" """ service_tier: str | None = None @@ -630,10 +720,11 @@ class BaseChatOpenAI(BaseChatModel): """ store: bool | None = None - """If `True`, OpenAI may store response data for future use. Defaults to `True` - for the Responses API and `False` for the Chat Completions API. + """If `True`, OpenAI may store response data for future use. - !!! version-added "Added in version 0.3.24" + Defaults to `True` for the Responses API and `False` for the Chat Completions API. + + !!! version-added "Added in `langchain-openai` 0.3.24" """ truncation: str | None = None @@ -641,7 +732,7 @@ class BaseChatOpenAI(BaseChatModel): If `'auto'`, model may drop input items from the middle of the message sequence to fit the context window. - !!! version-added "Added in version 0.3.24" + !!! 
version-added "Added in `langchain-openai` 0.3.24" """ use_previous_response_id: bool = False @@ -654,11 +745,11 @@ class BaseChatOpenAI(BaseChatModel): For example, the following two are equivalent: ```python - llm = ChatOpenAI( + model = ChatOpenAI( model="...", use_previous_response_id=True, ) - llm.invoke( + model.invoke( [ HumanMessage("Hello"), AIMessage("Hi there!", response_metadata={"id": "resp_123"}), @@ -668,11 +759,11 @@ class BaseChatOpenAI(BaseChatModel): ``` ```python - llm = ChatOpenAI(model="...", use_responses_api=True) - llm.invoke([HumanMessage("How are you?")], previous_response_id="resp_123") + model = ChatOpenAI(model="...", use_responses_api=True) + model.invoke([HumanMessage("How are you?")], previous_response_id="resp_123") ``` - !!! version-added "Added in version 0.3.26" + !!! version-added "Added in `langchain-openai` 0.3.26" """ use_responses_api: bool | None = None @@ -680,7 +771,7 @@ class BaseChatOpenAI(BaseChatModel): If not specified then will be inferred based on invocation params. - !!! version-added "Added in version 0.3.9" + !!! version-added "Added in `langchain-openai` 0.3.9" """ output_version: str | None = Field( @@ -698,7 +789,7 @@ class BaseChatOpenAI(BaseChatModel): (Responses API only) - `'v1'`: v1 of LangChain cross-provider standard. - !!! warning "Behavior changed in 1.0.0" + !!! warning "Behavior changed in `langchain-openai` 1.0.0" Default updated to `"responses/v1"`. """ @@ -775,10 +866,18 @@ class BaseChatOpenAI(BaseChatModel): ): self.stream_usage = True + # Resolve API key from SecretStr or Callable + sync_api_key_value: str | Callable[[], str] | None = None + async_api_key_value: str | Callable[[], Awaitable[str]] | None = None + + if self.openai_api_key is not None: + # Because OpenAI and AsyncOpenAI clients support either sync or async + # callables for the API key, we need to resolve separate values here. + sync_api_key_value, async_api_key_value = _resolve_sync_and_async_api_keys( + self.openai_api_key + ) + client_params: dict = { - "api_key": ( - self.openai_api_key.get_secret_value() if self.openai_api_key else None - ), "organization": self.openai_organization, "base_url": self.openai_api_base, "timeout": self.request_timeout, @@ -799,24 +898,33 @@ class BaseChatOpenAI(BaseChatModel): ) raise ValueError(msg) if not self.client: - if self.openai_proxy and not self.http_client: - try: - import httpx - except ImportError as e: - msg = ( - "Could not import httpx python package. " - "Please install it with `pip install httpx`." + if sync_api_key_value is None: + # No valid sync API key, leave client as None and raise informative + # error on invocation. + self.client = None + self.root_client = None + else: + if self.openai_proxy and not self.http_client: + try: + import httpx + except ImportError as e: + msg = ( + "Could not import httpx python package. " + "Please install it with `pip install httpx`." 
+ ) + raise ImportError(msg) from e + self.http_client = httpx.Client( + proxy=self.openai_proxy, verify=global_ssl_context ) - raise ImportError(msg) from e - self.http_client = httpx.Client( - proxy=self.openai_proxy, verify=global_ssl_context - ) - sync_specific = { - "http_client": self.http_client - or _get_default_httpx_client(self.openai_api_base, self.request_timeout) - } - self.root_client = openai.OpenAI(**client_params, **sync_specific) # type: ignore[arg-type] - self.client = self.root_client.chat.completions + sync_specific = { + "http_client": self.http_client + or _get_default_httpx_client( + self.openai_api_base, self.request_timeout + ), + "api_key": sync_api_key_value, + } + self.root_client = openai.OpenAI(**client_params, **sync_specific) # type: ignore[arg-type] + self.client = self.root_client.chat.completions if not self.async_client: if self.openai_proxy and not self.http_async_client: try: @@ -834,7 +942,8 @@ class BaseChatOpenAI(BaseChatModel): "http_client": self.http_async_client or _get_default_async_httpx_client( self.openai_api_base, self.request_timeout - ) + ), + "api_key": async_api_key_value, } self.root_async_client = openai.AsyncOpenAI( **client_params, @@ -916,7 +1025,9 @@ class BaseChatOpenAI(BaseChatModel): ) usage_metadata: UsageMetadata | None = ( - _create_usage_metadata(token_usage) if token_usage else None + _create_usage_metadata(token_usage, chunk.get("service_tier")) + if token_usage + else None ) if len(choices) == 0: # logprobs is implicitly None @@ -962,6 +1073,16 @@ class BaseChatOpenAI(BaseChatModel): message=message_chunk, generation_info=generation_info or None ) + def _ensure_sync_client_available(self) -> None: + """Check that sync client is available, raise error if not.""" + if self.client is None: + msg = ( + "Sync client is not available. This happens when an async callable " + "was provided for the API key. Use async methods (ainvoke, astream) " + "instead, or provide a string or sync callable for the API key." 
+ ) + raise ValueError(msg) + def _stream_responses( self, messages: list[BaseMessage], @@ -969,6 +1090,7 @@ class BaseChatOpenAI(BaseChatModel): run_manager: CallbackManagerForLLMRun | None = None, **kwargs: Any, ) -> Iterator[ChatGenerationChunk]: + self._ensure_sync_client_available() kwargs["stream"] = True payload = self._get_request_payload(messages, stop=stop, **kwargs) if self.include_response_headers: @@ -1098,6 +1220,7 @@ class BaseChatOpenAI(BaseChatModel): stream_usage: bool | None = None, **kwargs: Any, ) -> Iterator[ChatGenerationChunk]: + self._ensure_sync_client_available() kwargs["stream"] = True stream_usage = self._should_stream_usage(stream_usage, **kwargs) if stream_usage: @@ -1166,6 +1289,7 @@ class BaseChatOpenAI(BaseChatModel): run_manager: CallbackManagerForLLMRun | None = None, **kwargs: Any, ) -> ChatResult: + self._ensure_sync_client_available() payload = self._get_request_payload(messages, stop=stop, **kwargs) generation_info = None raw_response = None @@ -1288,11 +1412,14 @@ class BaseChatOpenAI(BaseChatModel): raise TypeError(msg) token_usage = response_dict.get("usage") + service_tier = response_dict.get("service_tier") for res in choices: message = _convert_dict_to_message(res["message"]) if token_usage and isinstance(message, AIMessage): - message.usage_metadata = _create_usage_metadata(token_usage) + message.usage_metadata = _create_usage_metadata( + token_usage, service_tier + ) generation_info = generation_info or {} generation_info["finish_reason"] = ( res.get("finish_reason") @@ -1311,8 +1438,8 @@ class BaseChatOpenAI(BaseChatModel): } if "id" in response_dict: llm_output["id"] = response_dict["id"] - if "service_tier" in response_dict: - llm_output["service_tier"] = response_dict["service_tier"] + if service_tier: + llm_output["service_tier"] = service_tier if isinstance(response, openai.BaseModel) and getattr( response, "choices", None @@ -1670,9 +1797,6 @@ class BaseChatOpenAI(BaseChatModel): parallel_tool_calls: Set to `False` to disable parallel tool use. Defaults to `None` (no specification, which allows parallel tool use). kwargs: Any additional parameters are passed directly to `bind`. - - !!! warning "Behavior changed in 0.1.21" - Support for `strict` argument added. """ # noqa: E501 if parallel_tool_calls is not None: kwargs["parallel_tool_calls"] = parallel_tool_calls @@ -1733,17 +1857,19 @@ class BaseChatOpenAI(BaseChatModel): Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class, - - or a Pydantic class. + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. 
method: The method for steering model generation, one of: @@ -1758,12 +1884,18 @@ class BaseChatOpenAI(BaseChatModel): formatting the output into the desired schema into the model call include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. strict: - `True`: @@ -1801,16 +1933,16 @@ class BaseChatOpenAI(BaseChatModel): \"\"\"Get weather at a location.\"\"\" pass - llm = init_chat_model("openai:gpt-4o-mini") + model = init_chat_model("openai:gpt-4o-mini") - structured_llm = llm.with_structured_output( + structured_model = model.with_structured_output( ResponseSchema, tools=[get_weather], strict=True, include_raw=True, ) - structured_llm.invoke("What's the weather in Boston?") + structured_model.invoke("What's the weather in Boston?") ``` ```python @@ -1824,25 +1956,25 @@ class BaseChatOpenAI(BaseChatModel): kwargs: Additional keyword args are passed through to the model. Returns: - A `Runnable` that takes same inputs as a `BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is `False` and `schema` is a Pydantic class, `Runnable` - outputs an instance of `schema` (i.e., a Pydantic object). Otherwise, if - `include_raw` is `False` then `Runnable` outputs a `dict`. + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - If `include_raw` is `True`, then `Runnable` outputs a dict with keys: + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` - - `'raw'`: `BaseMessage` - - `'parsed'`: `None` if there was a parsing error, otherwise the type depends - on the `schema` as described above. - - `'parsing_error'`: `BaseException` or `None` - - !!! warning "Behavior changed in 0.3.12" + !!! warning "Behavior changed in `langchain-openai` 0.3.12" Support for `tools` added. - !!! warning "Behavior changed in 0.3.21" + !!! warning "Behavior changed in `langchain-openai` 0.3.21" Pass `kwargs` through to the model. 
- """ # noqa: E501 + """ if strict is not None and method == "json_mode": msg = "Argument `strict` is not supported with `method`='json_mode'" raise ValueError(msg) @@ -2057,7 +2189,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ```python from langchain_openai import ChatOpenAI - llm = ChatOpenAI( + model = ChatOpenAI( model="...", temperature=0, max_tokens=None, @@ -2104,7 +2236,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` Results in an `AIMessage` response: @@ -2133,7 +2265,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] Stream a response from the model: ```python - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.text, end="") ``` @@ -2158,7 +2290,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] To collect the full message, you can concatenate the chunks: ```python - stream = llm.stream(messages) + stream = model.stream(messages) full = next(stream) for chunk in stream: full += chunk @@ -2178,13 +2310,13 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ```python # Invoke - await llm.ainvoke(messages) + await model.ainvoke(messages) # Stream - async for chunk in (await llm.astream(messages)) + async for chunk in (await model.astream(messages)) # Batch - await llm.abatch([messages]) + await model.abatch([messages]) ``` Results in an `AIMessage` response: @@ -2236,11 +2368,11 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ) - llm_with_tools = llm.bind_tools( + model_with_tools = model.bind_tools( [GetWeather, GetPopulation] # strict = True # Enforce tool args schema is respected ) - ai_msg = llm_with_tools.invoke( + ai_msg = model_with_tools.invoke( "Which city is hotter today and which is bigger: LA or NY?" ) ai_msg.tool_calls @@ -2277,7 +2409,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] be set to `False` to disable parallel tool calls: ```python - ai_msg = llm_with_tools.invoke( + ai_msg = model_with_tools.invoke( "What is the weather in LA and NY?", parallel_tool_calls=False ) ai_msg.tool_calls @@ -2294,7 +2426,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ``` Like other runtime parameters, `parallel_tool_calls` can be bound to a model - using `llm.bind(parallel_tool_calls=False)` or during instantiation by + using `model.bind(parallel_tool_calls=False)` or during instantiation by setting `model_kwargs`. See `bind_tools` for more. @@ -2308,12 +2440,12 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ```python from langchain_openai import ChatOpenAI - llm = ChatOpenAI(model="...", output_version="responses/v1") + model = ChatOpenAI(model="...", output_version="responses/v1") tool = {"type": "web_search"} - llm_with_tools = llm.bind_tools([tool]) + model_with_tools = model.bind_tools([tool]) - response = llm_with_tools.invoke("What was a positive news story from today?") + response = model_with_tools.invoke("What was a positive news story from today?") response.content ``` @@ -2335,9 +2467,9 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ] ``` - !!! version-added "Added in version 0.3.9" + !!! version-added "Added in `langchain-openai` 0.3.9" - !!! version-added "Added in version 0.3.26: Updated `AIMessage` format" + !!! 
version-added "Added in `langchain-openai` 0.3.26: Updated `AIMessage` format" [`langchain-openai >= 0.3.26`](https://pypi.org/project/langchain-openai/#history) allows users to opt-in to an updated `AIMessage` format when using the Responses API. Setting `ChatOpenAI(..., output_version="responses/v1")` will @@ -2354,12 +2486,12 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ```python from langchain_openai import ChatOpenAI - llm = ChatOpenAI( + model = ChatOpenAI( model="...", use_responses_api=True, output_version="responses/v1", ) - response = llm.invoke("Hi, I'm Bob.") + response = model.invoke("Hi, I'm Bob.") response.text ``` @@ -2368,7 +2500,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ``` ```python - second_response = llm.invoke( + second_response = model.invoke( "What is my name?", previous_response_id=response.response_metadata["id"], ) @@ -2379,16 +2511,16 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] "Your name is Bob. How can I help you today, Bob?" ``` - !!! version-added "Added in version 0.3.9" + !!! version-added "Added in `langchain-openai` 0.3.9" - !!! version-added "Added in version 0.3.26" + !!! version-added "Added in `langchain-openai` 0.3.26" You can also initialize `ChatOpenAI` with `use_previous_response_id`. Input messages up to the most recent response will then be dropped from request payloads, and `previous_response_id` will be set using the ID of the most recent response. ```python - llm = ChatOpenAI(model="...", use_previous_response_id=True) + model = ChatOpenAI(model="...", use_previous_response_id=True) ``` ??? info "Reasoning output" @@ -2404,10 +2536,10 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] "summary": "auto", # 'detailed', 'auto', or None } - llm = ChatOpenAI( + model = ChatOpenAI( model="...", reasoning=reasoning, output_version="responses/v1" ) - response = llm.invoke("What is 3^3?") + response = model.invoke("What is 3^3?") # Response text print(f"Output: {response.text}") @@ -2424,7 +2556,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] Reasoning: The user wants to know... ``` - !!! version-added "Added in version 0.3.26: Updated `AIMessage` format" + !!! version-added "Added in `langchain-openai` 0.3.26: Updated `AIMessage` format" [`langchain-openai >= 0.3.26`](https://pypi.org/project/langchain-openai/#history) allows users to opt-in to an updated `AIMessage` format when using the Responses API. Setting `ChatOpenAI(..., output_version="responses/v1")` will @@ -2448,8 +2580,8 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ) - structured_llm = llm.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats") + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats") ``` ```python @@ -2465,8 +2597,8 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ??? info "JSON mode" ```python - json_llm = llm.bind(response_format={"type": "json_object"}) - ai_msg = json_llm.invoke( + json_model = model.bind(response_format={"type": "json_object"}) + ai_msg = json_model.invoke( "Return a JSON object with key 'random_ints' and a value of 10 random ints in [0-99]" ) ai_msg.content @@ -2495,7 +2627,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ] ) - ai_msg = llm.invoke([message]) + ai_msg = model.invoke([message]) ai_msg.content ``` @@ -2506,7 +2638,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ??? 
info "Token usage" ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.usage_metadata ```txt @@ -2516,7 +2648,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] When streaming, set the `stream_usage` kwarg: ```python - stream = llm.stream(messages, stream_usage=True) + stream = model.stream(messages, stream_usage=True) full = next(stream) for chunk in stream: full += chunk @@ -2530,8 +2662,8 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ??? info "Logprobs" ```python - logprobs_llm = llm.bind(logprobs=True) - ai_msg = logprobs_llm.invoke(messages) + logprobs_model = model.bind(logprobs=True) + ai_msg = logprobs_model.invoke(messages) ai_msg.response_metadata["logprobs"] ``` @@ -2590,7 +2722,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ??? info "Response metadata" ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.response_metadata ``` @@ -2621,7 +2753,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ```python from langchain_openai import ChatOpenAI - llm = ChatOpenAI(model="...", service_tier="flex") + model = ChatOpenAI(model="...", service_tier="flex") ``` Note that this is a beta feature that is only available for a subset of models. @@ -2639,7 +2771,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ```python title="LM Studio example with TTL (auto-eviction)" from langchain_openai import ChatOpenAI - llm = ChatOpenAI( + model = ChatOpenAI( base_url="http://localhost:1234/v1", api_key="lm-studio", # Can be any string model="mlx-community/QwQ-32B-4bit", @@ -2649,7 +2781,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ``` ```python title="vLLM example with custom parameters" - llm = ChatOpenAI( + model = ChatOpenAI( base_url="http://localhost:8000/v1", api_key="EMPTY", model="meta-llama/Llama-2-7b-chat-hf", @@ -2669,7 +2801,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ```python # Standard OpenAI parameters - llm = ChatOpenAI( + model = ChatOpenAI( model="...", model_kwargs={ "stream_options": {"include_usage": True}, @@ -2683,13 +2815,13 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] **Use `extra_body` for:** - Custom parameters specific to OpenAI-compatible providers (vLLM, LM Studio, - etc.) + OpenRouter, etc.) 
- Parameters that need to be nested under `extra_body` in the request - Any non-standard OpenAI API parameters ```python # Custom provider parameters - llm = ChatOpenAI( + model = ChatOpenAI( base_url="http://localhost:8000/v1", model="custom-model", extra_body={ @@ -2715,19 +2847,19 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] per-invocation to improve cache hit rates and reduce costs: ```python - llm = ChatOpenAI(model="...") + model = ChatOpenAI(model="...") - response = llm.invoke( + response = model.invoke( messages, prompt_cache_key="example-key-a", # Routes to same machine for cache hits ) - customer_response = llm.invoke(messages, prompt_cache_key="example-key-b") - support_response = llm.invoke(messages, prompt_cache_key="example-key-c") + customer_response = model.invoke(messages, prompt_cache_key="example-key-b") + support_response = model.invoke(messages, prompt_cache_key="example-key-c") # Dynamic cache keys based on context cache_key = f"example-key-{dynamic_suffix}" - response = llm.invoke(messages, prompt_cache_key=cache_key) + response = model.invoke(messages, prompt_cache_key=cache_key) ``` Cache keys help ensure requests with the same prompt prefix are routed to @@ -2745,7 +2877,11 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object.""" + """Get the namespace of the LangChain object. + + Returns: + `["langchain", "chat_models", "openai"]` + """ return ["langchain", "chat_models", "openai"] @property @@ -2829,17 +2965,19 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] Args: schema: The output schema. Can be passed in as: + - an OpenAI function/tool schema, - a JSON Schema, - a `TypedDict` class, - - or a Pydantic class, - - an OpenAI function/tool schema. + - or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. method: The method for steering model generation, one of: @@ -2857,12 +2995,18 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] Learn more about the [differences between methods](https://platform.openai.com/docs/guides/structured-outputs/function-calling-vs-response-format). include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a `BaseMessage`) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. 
+ + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. strict: - `True`: @@ -2905,16 +3049,16 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] \"\"\"Get weather at a location.\"\"\" pass - llm = init_chat_model("openai:gpt-4o-mini") + model = init_chat_model("openai:gpt-4o-mini") - structured_llm = llm.with_structured_output( + structured_model = model.with_structured_output( ResponseSchema, tools=[get_weather], strict=True, include_raw=True, ) - structured_llm.invoke("What's the weather in Boston?") + structured_model.invoke("What's the weather in Boston?") ``` ```python @@ -2928,24 +3072,26 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] kwargs: Additional keyword args are passed through to the model. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. - If `include_raw` is `False` and `schema` is a Pydantic class, Runnable outputs - an instance of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is `False` then `Runnable` outputs a `dict`. + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` - - `'raw'`: `BaseMessage` - - `'parsed'`: `None` if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: `BaseException` or `None` - - !!! warning "Behavior changed in 0.3.0" + !!! warning "Behavior changed in `langchain-openai` 0.3.0" `method` default changed from `"function_calling"` to `"json_schema"`. - !!! warning "Behavior changed in 0.3.12" + !!! warning "Behavior changed in `langchain-openai` 0.3.12" Support for `tools` added. - !!! warning "Behavior changed in 0.3.21" + !!! warning "Behavior changed in `langchain-openai` 0.3.21" Pass `kwargs` through to the model. ??? 
note "Example: `schema=Pydantic` class, `method='json_schema'`, `include_raw=False`, `strict=True`" @@ -2971,10 +3117,10 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ) - llm = ChatOpenAI(model="...", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatOpenAI(model="...", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) ``` @@ -3002,12 +3148,12 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ) - llm = ChatOpenAI(model="...", temperature=0) - structured_llm = llm.with_structured_output( + model = ChatOpenAI(model="...", temperature=0) + structured_model = model.with_structured_output( AnswerWithJustification, method="function_calling" ) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) ``` @@ -3033,12 +3179,12 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] justification: str - llm = ChatOpenAI(model="...", temperature=0) - structured_llm = llm.with_structured_output( + model = ChatOpenAI(model="...", temperature=0) + structured_model = model.with_structured_output( AnswerWithJustification, include_raw=True ) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) ``` @@ -3085,10 +3231,10 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ] - llm = ChatOpenAI(model="...", temperature=0) - structured_llm = llm.with_structured_output(AnswerWithJustification) + model = ChatOpenAI(model="...", temperature=0) + structured_model = model.with_structured_output(AnswerWithJustification) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) ``` @@ -3121,10 +3267,10 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] }, } - llm = ChatOpenAI(model="...", temperature=0) - structured_llm = llm.with_structured_output(oai_schema) + model = ChatOpenAI(model="...", temperature=0) + structured_model = model.with_structured_output(oai_schema) - structured_llm.invoke( + structured_model.invoke( "What weighs more a pound of bricks or a pound of feathers" ) ``` @@ -3148,12 +3294,12 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] justification: str - llm = ChatOpenAI(model="...", temperature=0) - structured_llm = llm.with_structured_output( + model = ChatOpenAI(model="...", temperature=0) + structured_model = model.with_structured_output( AnswerWithJustification, method="json_mode", include_raw=True ) - structured_llm.invoke( + structured_model.invoke( "Answer the following question. " "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" "What's heavier a pound of bricks or a pound of feathers?" @@ -3176,11 +3322,11 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override] ??? note "Example: `schema=None`, `method='json_mode'`, `include_raw=True`" ```python - structured_llm = llm.with_structured_output( + structured_model = model.with_structured_output( method="json_mode", include_raw=True ) - structured_llm.invoke( + structured_model.invoke( "Answer the following question. " "Make sure to return a JSON blob with keys 'answer' and 'justification'.\\n\\n" "What's heavier a pound of bricks or a pound of feathers?" 
@@ -3386,26 +3532,40 @@ class OpenAIRefusalError(Exception): """ -def _create_usage_metadata(oai_token_usage: dict) -> UsageMetadata: +def _create_usage_metadata( + oai_token_usage: dict, service_tier: str | None = None +) -> UsageMetadata: input_tokens = oai_token_usage.get("prompt_tokens") or 0 output_tokens = oai_token_usage.get("completion_tokens") or 0 total_tokens = oai_token_usage.get("total_tokens") or input_tokens + output_tokens + if service_tier not in {"priority", "flex"}: + service_tier = None + service_tier_prefix = f"{service_tier}_" if service_tier else "" input_token_details: dict = { "audio": (oai_token_usage.get("prompt_tokens_details") or {}).get( "audio_tokens" ), - "cache_read": (oai_token_usage.get("prompt_tokens_details") or {}).get( - "cached_tokens" - ), + f"{service_tier_prefix}cache_read": ( + oai_token_usage.get("prompt_tokens_details") or {} + ).get("cached_tokens"), } output_token_details: dict = { "audio": (oai_token_usage.get("completion_tokens_details") or {}).get( "audio_tokens" ), - "reasoning": (oai_token_usage.get("completion_tokens_details") or {}).get( - "reasoning_tokens" - ), + f"{service_tier_prefix}reasoning": ( + oai_token_usage.get("completion_tokens_details") or {} + ).get("reasoning_tokens"), } + if service_tier is not None: + # Avoid counting cache and reasoning tokens towards the service tier token + # counts, since service tier tokens are already priced differently + input_token_details[service_tier] = input_tokens - input_token_details.get( + f"{service_tier_prefix}cache_read", 0 + ) + output_token_details[service_tier] = output_tokens - output_token_details.get( + f"{service_tier_prefix}reasoning", 0 + ) return UsageMetadata( input_tokens=input_tokens, output_tokens=output_tokens, @@ -3419,20 +3579,34 @@ def _create_usage_metadata(oai_token_usage: dict) -> UsageMetadata: ) -def _create_usage_metadata_responses(oai_token_usage: dict) -> UsageMetadata: +def _create_usage_metadata_responses( + oai_token_usage: dict, service_tier: str | None = None +) -> UsageMetadata: input_tokens = oai_token_usage.get("input_tokens", 0) output_tokens = oai_token_usage.get("output_tokens", 0) total_tokens = oai_token_usage.get("total_tokens", input_tokens + output_tokens) + if service_tier not in {"priority", "flex"}: + service_tier = None + service_tier_prefix = f"{service_tier}_" if service_tier else "" output_token_details: dict = { - "reasoning": (oai_token_usage.get("output_tokens_details") or {}).get( - "reasoning_tokens" - ) + f"{service_tier_prefix}reasoning": ( + oai_token_usage.get("output_tokens_details") or {} + ).get("reasoning_tokens") } input_token_details: dict = { - "cache_read": (oai_token_usage.get("input_tokens_details") or {}).get( - "cached_tokens" - ) + f"{service_tier_prefix}cache_read": ( + oai_token_usage.get("input_tokens_details") or {} + ).get("cached_tokens") } + if service_tier is not None: + # Avoid counting cache and reasoning tokens towards the service tier token + # counts, since service tier tokens are already priced differently + output_token_details[service_tier] = output_tokens - output_token_details.get( + f"{service_tier_prefix}reasoning", 0 + ) + input_token_details[service_tier] = input_tokens - input_token_details.get( + f"{service_tier_prefix}cache_read", 0 + ) return UsageMetadata( input_tokens=input_tokens, output_tokens=output_tokens, @@ -3469,7 +3643,8 @@ def _get_last_messages( ) -> tuple[Sequence[BaseMessage], str | None]: """Get the last part of the conversation after the last `AIMessage` with an `id`. 
- Return: + Will return: + 1. Every message after the most-recent `AIMessage` that has a non-empty `response_metadata["id"]` (may be an empty list), 2. That `id`. @@ -3956,7 +4131,9 @@ def _construct_lc_result_from_responses_api( response_metadata["model_provider"] = "openai" response_metadata["model_name"] = response_metadata.get("model") if response.usage: - usage_metadata = _create_usage_metadata_responses(response.usage.model_dump()) + usage_metadata = _create_usage_metadata_responses( + response.usage.model_dump(), response.service_tier + ) else: usage_metadata = None @@ -4174,7 +4351,7 @@ def _convert_responses_chunk_to_generation_chunk( elif chunk.type == "response.created": id = chunk.response.id response_metadata["id"] = chunk.response.id # Backwards compatibility - elif chunk.type == "response.completed": + elif chunk.type in ("response.completed", "response.incomplete"): msg = cast( AIMessage, ( diff --git a/libs/partners/openai/langchain_openai/embeddings/azure.py b/libs/partners/openai/langchain_openai/embeddings/azure.py index 3b1d03d8797..1c854dbf292 100644 --- a/libs/partners/openai/langchain_openai/embeddings/azure.py +++ b/libs/partners/openai/langchain_openai/embeddings/azure.py @@ -38,14 +38,11 @@ class AzureOpenAIEmbeddings(OpenAIEmbeddings): # type: ignore[override] ``` Key init args β€” completion params: - model: str - Name of AzureOpenAI model to use. - dimensions: int | None - Number of dimensions for the embeddings. Can be specified only - if the underlying model supports it. - - Key init args β€” client params: - api_key: SecretStr | None + model: + Name of `AzureOpenAI` model to use. + dimensions: + Number of dimensions for the embeddings. Can be specified only if the + underlying model supports it. See full list of supported init args and their descriptions in the params section. @@ -75,12 +72,11 @@ class AzureOpenAIEmbeddings(OpenAIEmbeddings): # type: ignore[override] Embed multiple texts: ```python input_texts = ["Document 1...", "Document 2..."] + vectors = embed.embed_documents(input_texts) + print(len(vectors)) + # The first 3 coordinates for the first vector + print(vectors[0][:3]) ``` - vectors = embed.embed_documents(input_texts) - print(len(vectors)) - # The first 3 coordinates for the first vector - print(vectors[0][:3]) - ```python 2 [-0.024603435769677162, -0.007543657906353474, 0.0039630369283258915] diff --git a/libs/partners/openai/langchain_openai/embeddings/base.py b/libs/partners/openai/langchain_openai/embeddings/base.py index 087bfd71b99..c2a87fe06b4 100644 --- a/libs/partners/openai/langchain_openai/embeddings/base.py +++ b/libs/partners/openai/langchain_openai/embeddings/base.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import warnings -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Awaitable, Callable, Iterable, Mapping, Sequence from typing import Any, Literal, cast import openai @@ -15,6 +15,8 @@ from langchain_core.utils import from_env, get_pydantic_field_names, secret_from from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator from typing_extensions import Self +from langchain_openai.chat_models._client_utils import _resolve_sync_and_async_api_keys + logger = logging.getLogger(__name__) @@ -90,21 +92,21 @@ class OpenAIEmbeddings(BaseModel, Embeddings): ``` Key init args β€” embedding params: - model: str + model: Name of OpenAI model to use. 
- dimensions: int | None = None + dimensions: The number of dimensions the resulting output embeddings should have. Only supported in `'text-embedding-3'` and later models. Key init args β€” client params: - api_key: SecretStr | None = None + api_key: OpenAI API key. - organization: str | None = None + organization: OpenAI organization ID. If not passed in will be read from env var `OPENAI_ORG_ID`. - max_retries: int = 2 + max_retries: Maximum number of retries to make when generating. - request_timeout: float | Tuple[float, float] | Any | None = None + request_timeout: Timeout for requests to OpenAI completion API See full list of supported init args and their descriptions in the params section. @@ -157,42 +159,55 @@ class OpenAIEmbeddings(BaseModel, Embeddings): ``` """ - client: Any = Field(default=None, exclude=True) #: :meta private: - async_client: Any = Field(default=None, exclude=True) #: :meta private: + client: Any = Field(default=None, exclude=True) + + async_client: Any = Field(default=None, exclude=True) + model: str = "text-embedding-ada-002" + dimensions: int | None = None """The number of dimensions the resulting output embeddings should have. Only supported in `text-embedding-3` and later models. """ + # to support Azure OpenAI Service custom deployment names deployment: str | None = model + # TODO: Move to AzureOpenAIEmbeddings. openai_api_version: str | None = Field( default_factory=from_env("OPENAI_API_VERSION", default=None), alias="api_version", ) """Automatically inferred from env var `OPENAI_API_VERSION` if not provided.""" + # to support Azure OpenAI Service custom endpoints openai_api_base: str | None = Field( alias="base_url", default_factory=from_env("OPENAI_API_BASE", default=None) ) """Base URL path for API requests, leave blank if not using a proxy or service emulator.""" + # to support Azure OpenAI Service custom endpoints openai_api_type: str | None = Field( default_factory=from_env("OPENAI_API_TYPE", default=None) ) + # to support explicit proxy for OpenAI openai_proxy: str | None = Field( default_factory=from_env("OPENAI_PROXY", default=None) ) + embedding_ctx_length: int = 8191 """The maximum number of tokens to embed at once.""" - openai_api_key: SecretStr | None = Field( + + openai_api_key: ( + SecretStr | None | Callable[[], str] | Callable[[], Awaitable[str]] + ) = Field( alias="api_key", default_factory=secret_from_env("OPENAI_API_KEY", default=None) ) """Automatically inferred from env var `OPENAI_API_KEY` if not provided.""" + openai_organization: str | None = Field( alias="organization", default_factory=from_env( @@ -200,21 +215,29 @@ class OpenAIEmbeddings(BaseModel, Embeddings): ), ) """Automatically inferred from env var `OPENAI_ORG_ID` if not provided.""" + allowed_special: Literal["all"] | set[str] | None = None + disallowed_special: Literal["all"] | set[str] | Sequence[str] | None = None + chunk_size: int = 1000 """Maximum number of texts to embed in each batch""" + max_retries: int = 2 """Maximum number of retries to make when generating.""" + request_timeout: float | tuple[float, float] | Any | None = Field( default=None, alias="timeout" ) """Timeout for requests to OpenAI completion API. Can be float, `httpx.Timeout` or None.""" + headers: Any = None + tiktoken_enabled: bool = True """Set this to False for non-OpenAI implementations of the embeddings API, e.g. the `--extensions openai` extension for `text-generation-webui`""" + tiktoken_model_name: str | None = None """The model name to pass to tiktoken when using this class. 
Tiktoken is used to count the number of tokens in documents to constrain @@ -225,29 +248,39 @@ class OpenAIEmbeddings(BaseModel, Embeddings): when using one of the many model providers that expose an OpenAI-like API but with different models. In those cases, in order to avoid erroring when tiktoken is called, you can specify a model name to use here.""" + show_progress_bar: bool = False """Whether to show a progress bar when embedding.""" + model_kwargs: dict[str, Any] = Field(default_factory=dict) """Holds any model parameters valid for `create` call not explicitly specified.""" + skip_empty: bool = False - """Whether to skip empty strings when embedding or raise an error. - Defaults to not skipping.""" + """Whether to skip empty strings when embedding or raise an error.""" + default_headers: Mapping[str, str] | None = None + default_query: Mapping[str, object] | None = None + # Configure a custom httpx client. See the # [httpx documentation](https://www.python-httpx.org/api/#client) for more details. + retry_min_seconds: int = 4 """Min number of seconds to wait between retries""" + retry_max_seconds: int = 20 """Max number of seconds to wait between retries""" + http_client: Any | None = None """Optional `httpx.Client`. Only used for sync invocations. Must specify `http_async_client` as well if you'd like a custom client for async invocations. """ + http_async_client: Any | None = None """Optional `httpx.AsyncClient`. Only used for async invocations. Must specify `http_client` as well if you'd like a custom client for sync invocations.""" + check_embedding_ctx_length: bool = True """Whether to check the token length of inputs and automatically split inputs longer than embedding_ctx_length.""" @@ -293,10 +326,19 @@ class OpenAIEmbeddings(BaseModel, Embeddings): "If you are using Azure, please use the `AzureOpenAIEmbeddings` class." ) raise ValueError(msg) + + # Resolve API key from SecretStr or Callable + sync_api_key_value: str | Callable[[], str] | None = None + async_api_key_value: str | Callable[[], Awaitable[str]] | None = None + + if self.openai_api_key is not None: + # Because OpenAI and AsyncOpenAI clients support either sync or async + # callables for the API key, we need to resolve separate values here. + sync_api_key_value, async_api_key_value = _resolve_sync_and_async_api_keys( + self.openai_api_key + ) + client_params: dict = { - "api_key": ( - self.openai_api_key.get_secret_value() if self.openai_api_key else None - ), "organization": self.openai_organization, "base_url": self.openai_api_base, "timeout": self.request_timeout, @@ -316,18 +358,26 @@ class OpenAIEmbeddings(BaseModel, Embeddings): ) raise ValueError(msg) if not self.client: - if self.openai_proxy and not self.http_client: - try: - import httpx - except ImportError as e: - msg = ( - "Could not import httpx python package. " - "Please install it with `pip install httpx`." - ) - raise ImportError(msg) from e - self.http_client = httpx.Client(proxy=self.openai_proxy) - sync_specific = {"http_client": self.http_client} - self.client = openai.OpenAI(**client_params, **sync_specific).embeddings # type: ignore[arg-type] + if sync_api_key_value is None: + # No valid sync API key, leave client as None and raise informative + # error on invocation. + self.client = None + else: + if self.openai_proxy and not self.http_client: + try: + import httpx + except ImportError as e: + msg = ( + "Could not import httpx python package. " + "Please install it with `pip install httpx`." 
+ ) + raise ImportError(msg) from e + self.http_client = httpx.Client(proxy=self.openai_proxy) + sync_specific = { + "http_client": self.http_client, + "api_key": sync_api_key_value, + } + self.client = openai.OpenAI(**client_params, **sync_specific).embeddings # type: ignore[arg-type] if not self.async_client: if self.openai_proxy and not self.http_async_client: try: @@ -339,7 +389,10 @@ class OpenAIEmbeddings(BaseModel, Embeddings): ) raise ImportError(msg) from e self.http_async_client = httpx.AsyncClient(proxy=self.openai_proxy) - async_specific = {"http_client": self.http_async_client} + async_specific = { + "http_client": self.http_async_client, + "api_key": async_api_key_value, + } self.async_client = openai.AsyncOpenAI( **client_params, **async_specific, # type: ignore[arg-type] @@ -353,6 +406,16 @@ class OpenAIEmbeddings(BaseModel, Embeddings): params["dimensions"] = self.dimensions return params + def _ensure_sync_client_available(self) -> None: + """Check that the sync client is available, raising an error if not.""" + if self.client is None: + msg = ( + "Sync client is not available. This happens when an async callable " + "was provided for the API key. Use async methods (aembed_documents, " + "aembed_query) instead, or provide a string or sync callable for the API key." + ) + raise ValueError(msg) + def _tokenize( self, texts: list[str], chunk_size: int ) -> tuple[Iterable[int], list[list[int] | str], list[int]]: @@ -466,12 +529,12 @@ class OpenAIEmbeddings(BaseModel, Embeddings): and HuggingFace tokenizer based on the tiktoken_enabled flag. Args: - texts (List[str]): A list of texts to embed. - engine (str): The engine or model to use for embeddings. - chunk_size (int | None): The size of chunks for processing embeddings. + texts: A list of texts to embed. + engine: The engine or model to use for embeddings. + chunk_size: The size of chunks for processing embeddings. Returns: - List[List[float]]: A list of embeddings for each input text. + A list of embeddings for each input text. """ _chunk_size = chunk_size or self.chunk_size client_kwargs = {**self._invocation_params, **kwargs} @@ -518,12 +581,12 @@ class OpenAIEmbeddings(BaseModel, Embeddings): `tiktoken` and HuggingFace `tokenizer` based on the tiktoken_enabled flag. Args: - texts (List[str]): A list of texts to embed. - engine (str): The engine or model to use for embeddings. - chunk_size (int | None): The size of chunks for processing embeddings. + texts: A list of texts to embed. + engine: The engine or model to use for embeddings. + chunk_size: The size of chunks for processing embeddings. Returns: - List[List[float]]: A list of embeddings for each input text. + A list of embeddings for each input text. """ _chunk_size = chunk_size or self.chunk_size client_kwargs = {**self._invocation_params, **kwargs} @@ -572,6 +635,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings): Returns: List of embeddings, one for each text. """ + self._ensure_sync_client_available() chunk_size_ = chunk_size or self.chunk_size client_kwargs = {**self._invocation_params, **kwargs} if not self.check_embedding_ctx_length: @@ -636,6 +700,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings): Returns: Embedding for the text. 
""" + self._ensure_sync_client_available() return self.embed_documents([text], **kwargs)[0] async def aembed_query(self, text: str, **kwargs: Any) -> list[float]: diff --git a/libs/partners/openai/langchain_openai/llms/azure.py b/libs/partners/openai/langchain_openai/llms/azure.py index dc63d32f8e7..e86e9113867 100644 --- a/libs/partners/openai/langchain_openai/llms/azure.py +++ b/libs/partners/openai/langchain_openai/llms/azure.py @@ -97,7 +97,11 @@ class AzureOpenAI(BaseOpenAI): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object.""" + """Get the namespace of the LangChain object. + + Returns: + `["langchain", "llms", "openai"]` + """ return ["langchain", "llms", "openai"] @property diff --git a/libs/partners/openai/langchain_openai/llms/base.py b/libs/partners/openai/langchain_openai/llms/base.py index 1668854bba2..edd5c837e62 100644 --- a/libs/partners/openai/langchain_openai/llms/base.py +++ b/libs/partners/openai/langchain_openai/llms/base.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import sys -from collections.abc import AsyncIterator, Collection, Iterator, Mapping +from collections.abc import AsyncIterator, Callable, Collection, Iterator, Mapping from typing import Any, Literal import openai @@ -62,46 +62,46 @@ class BaseOpenAI(BaseLLM): ``` Key init args β€” completion params: - model_name: str + model_name: Name of OpenAI model to use. - temperature: float + temperature: Sampling temperature. - max_tokens: int + max_tokens: Max number of tokens to generate. - top_p: float + top_p: Total probability mass of tokens to consider at each step. - frequency_penalty: float + frequency_penalty: Penalizes repeated tokens according to frequency. - presence_penalty: float + presence_penalty: Penalizes repeated tokens. - n: int + n: How many completions to generate for each prompt. - best_of: int + best_of: Generates best_of completions server-side and returns the "best". - logit_bias: dict[str, float] | None + logit_bias: Adjust the probability of specific tokens being generated. - seed: int | None + seed: Seed for generation. - logprobs: int | None + logprobs: Include the log probabilities on the logprobs most likely output tokens. - streaming: bool + streaming: Whether to stream the results or not. Key init args β€” client params: - openai_api_key: SecretStr | None + openai_api_key: OpenAI API key. If not passed in will be read from env var `OPENAI_API_KEY`. - openai_api_base: str | None + openai_api_base: Base URL path for API requests, leave blank if not using a proxy or service emulator. - openai_organization: str | None + openai_organization: OpenAI organization ID. If not passed in will be read from env var `OPENAI_ORG_ID`. - request_timeout: Union[float, tuple[float, float], Any, None] + request_timeout: Timeout for requests to OpenAI completion API. - max_retries: int + max_retries: Maximum number of retries to make when generating. - batch_size: int + batch_size: Batch size to use when passing multiple documents to generate. See full list of supported init args and their descriptions in the params section. 
@@ -110,7 +110,7 @@ class BaseOpenAI(BaseLLM): ```python from langchain_openai.llms.base import BaseOpenAI - llm = BaseOpenAI( + model = BaseOpenAI( model_name="gpt-3.5-turbo-instruct", temperature=0.7, max_tokens=256, @@ -127,7 +127,7 @@ class BaseOpenAI(BaseLLM): Invoke: ```python input_text = "The meaning of life is " - response = llm.invoke(input_text) + response = model.invoke(input_text) print(response) ``` @@ -138,7 +138,7 @@ class BaseOpenAI(BaseLLM): Stream: ```python - for chunk in llm.stream(input_text): + for chunk in model.stream(input_text): print(chunk, end="") ``` ```txt @@ -148,14 +148,14 @@ class BaseOpenAI(BaseLLM): Async: ```python - response = await llm.ainvoke(input_text) + response = await model.ainvoke(input_text) # stream: - # async for chunk in llm.astream(input_text): + # async for chunk in model.astream(input_text): # print(chunk, end="") # batch: - # await llm.abatch([input_text]) + # await model.abatch([input_text]) ``` ``` "a philosophical question that has been debated by thinkers and @@ -164,37 +164,50 @@ class BaseOpenAI(BaseLLM): """ - client: Any = Field(default=None, exclude=True) #: :meta private: - async_client: Any = Field(default=None, exclude=True) #: :meta private: + client: Any = Field(default=None, exclude=True) + + async_client: Any = Field(default=None, exclude=True) + model_name: str = Field(default="gpt-3.5-turbo-instruct", alias="model") """Model name to use.""" + temperature: float = 0.7 """What sampling temperature to use.""" + max_tokens: int = 256 """The maximum number of tokens to generate in the completion. -1 returns as many tokens as possible given the prompt and the models maximal context size.""" + top_p: float = 1 """Total probability mass of tokens to consider at each step.""" + frequency_penalty: float = 0 """Penalizes repeated tokens according to frequency.""" + presence_penalty: float = 0 """Penalizes repeated tokens.""" + n: int = 1 """How many completions to generate for each prompt.""" + best_of: int = 1 """Generates best_of completions server-side and returns the "best".""" + model_kwargs: dict[str, Any] = Field(default_factory=dict) """Holds any model parameters valid for `create` call not explicitly specified.""" - openai_api_key: SecretStr | None = Field( + + openai_api_key: SecretStr | None | Callable[[], str] = Field( alias="api_key", default_factory=secret_from_env("OPENAI_API_KEY", default=None) ) """Automatically inferred from env var `OPENAI_API_KEY` if not provided.""" + openai_api_base: str | None = Field( alias="base_url", default_factory=from_env("OPENAI_API_BASE", default=None) ) """Base URL path for API requests, leave blank if not using a proxy or service emulator.""" + openai_organization: str | None = Field( alias="organization", default_factory=from_env( @@ -202,32 +215,43 @@ class BaseOpenAI(BaseLLM): ), ) """Automatically inferred from env var `OPENAI_ORG_ID` if not provided.""" + # to support explicit proxy for OpenAI openai_proxy: str | None = Field( default_factory=from_env("OPENAI_PROXY", default=None) ) + batch_size: int = 20 """Batch size to use when passing multiple documents to generate.""" + request_timeout: float | tuple[float, float] | Any | None = Field( default=None, alias="timeout" ) """Timeout for requests to OpenAI completion API. 
Can be float, `httpx.Timeout` or None.""" + logit_bias: dict[str, float] | None = None """Adjust the probability of specific tokens being generated.""" + max_retries: int = 2 """Maximum number of retries to make when generating.""" + seed: int | None = None """Seed for generation""" + logprobs: int | None = None """Include the log probabilities on the logprobs most likely output tokens, as well the chosen tokens.""" + streaming: bool = False """Whether to stream the results or not.""" + allowed_special: Literal["all"] | set[str] = set() """Set of special tokens that are allowed。""" + disallowed_special: Literal["all"] | Collection[str] = "all" """Set of special tokens that are not allowed。""" + tiktoken_model_name: str | None = None """The model name to pass to tiktoken when using this class. Tiktoken is used to count the number of tokens in documents to constrain @@ -238,8 +262,11 @@ class BaseOpenAI(BaseLLM): when using one of the many model providers that expose an OpenAI-like API but with different models. In those cases, in order to avoid erroring when tiktoken is called, you can specify a model name to use here.""" + default_headers: Mapping[str, str] | None = None + default_query: Mapping[str, object] | None = None + # Configure a custom httpx client. See the # [httpx documentation](https://www.python-httpx.org/api/#client) for more details. http_client: Any | None = None @@ -247,9 +274,11 @@ class BaseOpenAI(BaseLLM): `http_async_client` as well if you'd like a custom client for async invocations. """ + http_async_client: Any | None = None """Optional `httpx.AsyncClient`. Only used for async invocations. Must specify `http_client` as well if you'd like a custom client for sync invocations.""" + extra_body: Mapping[str, Any] | None = None """Optional additional JSON properties to include in the request parameters when making requests to OpenAI compatible APIs, such as vLLM.""" @@ -276,10 +305,16 @@ class BaseOpenAI(BaseLLM): msg = "Cannot stream results when best_of > 1." raise ValueError(msg) + # Resolve API key from SecretStr or Callable + api_key_value: str | Callable[[], str] | None = None + if self.openai_api_key is not None: + if isinstance(self.openai_api_key, SecretStr): + api_key_value = self.openai_api_key.get_secret_value() + elif callable(self.openai_api_key): + api_key_value = self.openai_api_key + client_params: dict = { - "api_key": ( - self.openai_api_key.get_secret_value() if self.openai_api_key else None - ), + "api_key": api_key_value, "organization": self.openai_organization, "base_url": self.openai_api_base, "timeout": self.request_timeout, @@ -707,29 +742,29 @@ class OpenAI(BaseOpenAI): ``` Key init args β€” completion params: - model: str + model: Name of OpenAI model to use. - temperature: float + temperature: Sampling temperature. - max_tokens: int | None + max_tokens: Max number of tokens to generate. - logprobs: bool | None + logprobs: Whether to return logprobs. - stream_options: Dict + stream_options: Configure streaming outputs, like whether to return token usage when streaming (`{"include_usage": True}`). Key init args β€” client params: - timeout: Union[float, Tuple[float, float], Any, None] + timeout: Timeout for requests. - max_retries: int + max_retries: Max number of retries. - api_key: str | None + api_key: OpenAI API key. If not passed in will be read from env var `OPENAI_API_KEY`. - base_url: str | None + base_url: Base URL for API requests. Only specify if using a proxy or service emulator. 
- organization: str | None + organization: OpenAI organization ID. If not passed in will be read from env var `OPENAI_ORG_ID`. @@ -739,7 +774,7 @@ class OpenAI(BaseOpenAI): ```python from langchain_openai import OpenAI - llm = OpenAI( + model = OpenAI( model="gpt-3.5-turbo-instruct", temperature=0, max_retries=2, @@ -753,7 +788,7 @@ class OpenAI(BaseOpenAI): Invoke: ```python input_text = "The meaning of life is " - llm.invoke(input_text) + model.invoke(input_text) ``` ```txt "a philosophical question that has been debated by thinkers and scholars for centuries." @@ -761,7 +796,7 @@ class OpenAI(BaseOpenAI): Stream: ```python - for chunk in llm.stream(input_text): + for chunk in model.stream(input_text): print(chunk, end="|") ``` ```txt @@ -769,7 +804,7 @@ class OpenAI(BaseOpenAI): ``` ```python - "".join(llm.stream(input_text)) + "".join(model.stream(input_text)) ``` ```txt "a philosophical question that has been debated by thinkers and scholars for centuries." @@ -777,14 +812,14 @@ class OpenAI(BaseOpenAI): Async: ```python - await llm.ainvoke(input_text) + await model.ainvoke(input_text) # stream: - # async for chunk in (await llm.astream(input_text)): + # async for chunk in (await model.astream(input_text)): # print(chunk) # batch: - # await llm.abatch([input_text]) + # await model.abatch([input_text]) ``` ```txt "a philosophical question that has been debated by thinkers and scholars for centuries." @@ -793,7 +828,11 @@ class OpenAI(BaseOpenAI): @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object.""" + """Get the namespace of the LangChain object. + + Returns: + `["langchain", "llms", "openai"]` + """ return ["langchain", "llms", "openai"] @classmethod diff --git a/libs/partners/openai/langchain_openai/middleware/__init__.py b/libs/partners/openai/langchain_openai/middleware/__init__.py new file mode 100644 index 00000000000..d36d0b85a9b --- /dev/null +++ b/libs/partners/openai/langchain_openai/middleware/__init__.py @@ -0,0 +1,8 @@ +"""Middleware implementations for OpenAI-backed agents.""" + +from .openai_moderation import OpenAIModerationError, OpenAIModerationMiddleware + +__all__ = [ + "OpenAIModerationError", + "OpenAIModerationMiddleware", +] diff --git a/libs/partners/openai/langchain_openai/middleware/openai_moderation.py b/libs/partners/openai/langchain_openai/middleware/openai_moderation.py new file mode 100644 index 00000000000..e6296243bcd --- /dev/null +++ b/libs/partners/openai/langchain_openai/middleware/openai_moderation.py @@ -0,0 +1,484 @@ +"""Agent middleware that integrates OpenAI's moderation endpoint.""" + +from __future__ import annotations + +import json +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, Literal, cast + +from langchain.agents.middleware.types import AgentMiddleware, AgentState, hook_config +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage +from openai import AsyncOpenAI, OpenAI +from openai.types import Moderation, ModerationModel + +if TYPE_CHECKING: # pragma: no cover + from langgraph.runtime import Runtime + +ViolationStage = Literal["input", "output", "tool"] + +DEFAULT_VIOLATION_TEMPLATE = ( + "I'm sorry, but I can't comply with that request. It was flagged for {categories}." 
+) + + +class OpenAIModerationError(RuntimeError): + """Raised when OpenAI flags content and `exit_behavior` is set to ``"error"``.""" + + def __init__( + self, + *, + content: str, + stage: ViolationStage, + result: Moderation, + message: str, + ) -> None: + """Initialize the error with violation details. + + Args: + content: The content that was flagged. + stage: The stage where the violation occurred. + result: The moderation result from OpenAI. + message: The error message. + """ + super().__init__(message) + self.content = content + self.stage = stage + self.result = result + + +class OpenAIModerationMiddleware(AgentMiddleware[AgentState[Any], Any]): + """Moderate agent traffic using OpenAI's moderation endpoint.""" + + def __init__( + self, + *, + model: ModerationModel = "omni-moderation-latest", + check_input: bool = True, + check_output: bool = True, + check_tool_results: bool = False, + exit_behavior: Literal["error", "end", "replace"] = "end", + violation_message: str | None = None, + client: OpenAI | None = None, + async_client: AsyncOpenAI | None = None, + ) -> None: + """Create the middleware instance. + + Args: + model: OpenAI moderation model to use. + check_input: Whether to check user input messages. + check_output: Whether to check model output messages. + check_tool_results: Whether to check tool result messages. + exit_behavior: How to handle violations + (`'error'`, `'end'`, or `'replace'`). + violation_message: Custom template for violation messages. + client: Optional pre-configured OpenAI client to reuse. + If not provided, a new client will be created. + async_client: Optional pre-configured AsyncOpenAI client to reuse. + If not provided, a new async client will be created. + """ + super().__init__() + self.model = model + self.check_input = check_input + self.check_output = check_output + self.check_tool_results = check_tool_results + self.exit_behavior = exit_behavior + self.violation_message = violation_message + + self._client = client + self._async_client = async_client + + @hook_config(can_jump_to=["end"]) + def before_model( + self, state: AgentState[Any], runtime: Runtime[Any] + ) -> dict[str, Any] | None: # type: ignore[override] + """Moderate user input and tool results before the model is called. + + Args: + state: Current agent state containing messages. + runtime: Agent runtime context. + + Returns: + Updated state with moderated messages, or `None` if no changes. + """ + if not self.check_input and not self.check_tool_results: + return None + + messages = list(state.get("messages", [])) + if not messages: + return None + + return self._moderate_inputs(messages) + + @hook_config(can_jump_to=["end"]) + def after_model( + self, state: AgentState[Any], runtime: Runtime[Any] + ) -> dict[str, Any] | None: # type: ignore[override] + """Moderate model output after the model is called. + + Args: + state: Current agent state containing messages. + runtime: Agent runtime context. + + Returns: + Updated state with moderated messages, or `None` if no changes. + """ + if not self.check_output: + return None + + messages = list(state.get("messages", [])) + if not messages: + return None + + return self._moderate_output(messages) + + @hook_config(can_jump_to=["end"]) + async def abefore_model( + self, state: AgentState[Any], runtime: Runtime[Any] + ) -> dict[str, Any] | None: # type: ignore[override] + """Async version of before_model. + + Args: + state: Current agent state containing messages. + runtime: Agent runtime context. 
+ + Returns: + Updated state with moderated messages, or `None` if no changes. + """ + if not self.check_input and not self.check_tool_results: + return None + + messages = list(state.get("messages", [])) + if not messages: + return None + + return await self._amoderate_inputs(messages) + + @hook_config(can_jump_to=["end"]) + async def aafter_model( + self, state: AgentState[Any], runtime: Runtime[Any] + ) -> dict[str, Any] | None: # type: ignore[override] + """Async version of after_model. + + Args: + state: Current agent state containing messages. + runtime: Agent runtime context. + + Returns: + Updated state with moderated messages, or `None` if no changes. + """ + if not self.check_output: + return None + + messages = list(state.get("messages", [])) + if not messages: + return None + + return await self._amoderate_output(messages) + + def _moderate_inputs( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + working = list(messages) + modified = False + + if self.check_tool_results: + action = self._moderate_tool_messages(working) + if action: + if "jump_to" in action: + return action + working = cast("list[BaseMessage]", action["messages"]) + modified = True + + if self.check_input: + action = self._moderate_user_message(working) + if action: + if "jump_to" in action: + return action + working = cast("list[BaseMessage]", action["messages"]) + modified = True + + if modified: + return {"messages": working} + + return None + + async def _amoderate_inputs( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + working = list(messages) + modified = False + + if self.check_tool_results: + action = await self._amoderate_tool_messages(working) + if action: + if "jump_to" in action: + return action + working = cast("list[BaseMessage]", action["messages"]) + modified = True + + if self.check_input: + action = await self._amoderate_user_message(working) + if action: + if "jump_to" in action: + return action + working = cast("list[BaseMessage]", action["messages"]) + modified = True + + if modified: + return {"messages": working} + + return None + + def _moderate_output( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + last_ai_idx = self._find_last_index(messages, AIMessage) + if last_ai_idx is None: + return None + + ai_message = messages[last_ai_idx] + text = self._extract_text(ai_message) + if not text: + return None + + result = self._moderate(text) + if not result.flagged: + return None + + return self._apply_violation( + messages, index=last_ai_idx, stage="output", content=text, result=result + ) + + async def _amoderate_output( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + last_ai_idx = self._find_last_index(messages, AIMessage) + if last_ai_idx is None: + return None + + ai_message = messages[last_ai_idx] + text = self._extract_text(ai_message) + if not text: + return None + + result = await self._amoderate(text) + if not result.flagged: + return None + + return self._apply_violation( + messages, index=last_ai_idx, stage="output", content=text, result=result + ) + + def _moderate_tool_messages( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + last_ai_idx = self._find_last_index(messages, AIMessage) + if last_ai_idx is None: + return None + + working = list(messages) + modified = False + + for idx in range(last_ai_idx + 1, len(working)): + msg = working[idx] + if not isinstance(msg, ToolMessage): + continue + + text = self._extract_text(msg) + if not text: + continue + + result = 
self._moderate(text) + if not result.flagged: + continue + + action = self._apply_violation( + working, index=idx, stage="tool", content=text, result=result + ) + if action: + if "jump_to" in action: + return action + working = cast("list[BaseMessage]", action["messages"]) + modified = True + + if modified: + return {"messages": working} + + return None + + async def _amoderate_tool_messages( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + last_ai_idx = self._find_last_index(messages, AIMessage) + if last_ai_idx is None: + return None + + working = list(messages) + modified = False + + for idx in range(last_ai_idx + 1, len(working)): + msg = working[idx] + if not isinstance(msg, ToolMessage): + continue + + text = self._extract_text(msg) + if not text: + continue + + result = await self._amoderate(text) + if not result.flagged: + continue + + action = self._apply_violation( + working, index=idx, stage="tool", content=text, result=result + ) + if action: + if "jump_to" in action: + return action + working = cast("list[BaseMessage]", action["messages"]) + modified = True + + if modified: + return {"messages": working} + + return None + + def _moderate_user_message( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + idx = self._find_last_index(messages, HumanMessage) + if idx is None: + return None + + message = messages[idx] + text = self._extract_text(message) + if not text: + return None + + result = self._moderate(text) + if not result.flagged: + return None + + return self._apply_violation( + messages, index=idx, stage="input", content=text, result=result + ) + + async def _amoderate_user_message( + self, messages: Sequence[BaseMessage] + ) -> dict[str, Any] | None: + idx = self._find_last_index(messages, HumanMessage) + if idx is None: + return None + + message = messages[idx] + text = self._extract_text(message) + if not text: + return None + + result = await self._amoderate(text) + if not result.flagged: + return None + + return self._apply_violation( + messages, index=idx, stage="input", content=text, result=result + ) + + def _apply_violation( + self, + messages: Sequence[BaseMessage], + *, + index: int | None, + stage: ViolationStage, + content: str, + result: Moderation, + ) -> dict[str, Any] | None: + violation_text = self._format_violation_message(content, result) + + if self.exit_behavior == "error": + raise OpenAIModerationError( + content=content, + stage=stage, + result=result, + message=violation_text, + ) + + if self.exit_behavior == "end": + return {"jump_to": "end", "messages": [AIMessage(content=violation_text)]} + + if index is None: + return None + + new_messages = list(messages) + original = new_messages[index] + new_messages[index] = cast( + BaseMessage, original.model_copy(update={"content": violation_text}) + ) + return {"messages": new_messages} + + def _moderate(self, text: str) -> Moderation: + if self._client is None: + self._client = self._build_client() + response = self._client.moderations.create(model=self.model, input=text) + return response.results[0] + + async def _amoderate(self, text: str) -> Moderation: + if self._async_client is None: + self._async_client = self._build_async_client() + response = await self._async_client.moderations.create( + model=self.model, input=text + ) + return response.results[0] + + def _build_client(self) -> OpenAI: + self._client = OpenAI() + return self._client + + def _build_async_client(self) -> AsyncOpenAI: + self._async_client = AsyncOpenAI() + return self._async_client + + 
def _format_violation_message(self, content: str, result: Moderation) -> str: + # Convert categories to dict and filter for flagged items + categories_dict = result.categories.model_dump() + categories = [ + name.replace("_", " ") + for name, flagged in categories_dict.items() + if flagged + ] + category_label = ( + ", ".join(categories) if categories else "OpenAI's safety policies" + ) + template = self.violation_message or DEFAULT_VIOLATION_TEMPLATE + scores_json = json.dumps(result.category_scores.model_dump(), sort_keys=True) + try: + message = template.format( + categories=category_label, + category_scores=scores_json, + original_content=content, + ) + except KeyError: + message = template + return message + + def _find_last_index( + self, messages: Sequence[BaseMessage], message_type: type[BaseMessage] + ) -> int | None: + for idx in range(len(messages) - 1, -1, -1): + if isinstance(messages[idx], message_type): + return idx + return None + + def _extract_text(self, message: BaseMessage) -> str | None: + if message.content is None: + return None + text_accessor = getattr(message, "text", None) + if text_accessor is None: + return str(message.content) + text = str(text_accessor) + return text if text else None + + +__all__ = [ + "OpenAIModerationError", + "OpenAIModerationMiddleware", +] diff --git a/libs/partners/openai/langchain_openai/tools/custom_tool.py b/libs/partners/openai/langchain_openai/tools/custom_tool.py index 0724d56277c..bf8b1ad1bcf 100644 --- a/libs/partners/openai/langchain_openai/tools/custom_tool.py +++ b/libs/partners/openai/langchain_openai/tools/custom_tool.py @@ -38,9 +38,9 @@ def custom_tool(*args: Any, **kwargs: Any) -> Any: return "27" - llm = ChatOpenAI(model="gpt-5", output_version="responses/v1") + model = ChatOpenAI(model="gpt-5", output_version="responses/v1") - agent = create_react_agent(llm, [execute_code]) + agent = create_react_agent(model, [execute_code]) input_message = {"role": "user", "content": "Use the tool to calculate 3^3."} for step in agent.stream( @@ -79,9 +79,9 @@ def custom_tool(*args: Any, **kwargs: Any) -> Any: return "27" - llm = ChatOpenAI(model="gpt-5", output_version="responses/v1") + model = ChatOpenAI(model="gpt-5", output_version="responses/v1") - agent = create_react_agent(llm, [do_math]) + agent = create_react_agent(model, [do_math]) input_message = {"role": "user", "content": "Use the tool to calculate 3^3."} for step in agent.stream( diff --git a/libs/partners/openai/pyproject.toml b/libs/partners/openai/pyproject.toml index 124d6e17b3f..362f6db73b4 100644 --- a/libs/partners/openai/pyproject.toml +++ b/libs/partners/openai/pyproject.toml @@ -3,27 +3,28 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-openai" +description = "An integration package connecting OpenAI and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.2" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.2,<2.0.0", "openai>=1.109.1,<3.0.0", "tiktoken>=0.7.0,<1.0.0", ] -name = "langchain-openai" -version = "1.0.0a4" -description = "An integration package connecting OpenAI and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/openai" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/openai" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-openai%22" -docs = 
"https://reference.langchain.com/python/integrations/langchain_openai/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/openai" +Documentation = "https://reference.langchain.com/python/integrations/langchain_openai/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/openai" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-openai%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ @@ -40,6 +41,7 @@ test = [ "vcrpy>=7.0.0,<8.0.0", "numpy>=1.26.4; python_version<'3.13'", "numpy>=2.1.0; python_version>='3.13'", + "langchain", "langchain-core", "langchain-tests", ] @@ -47,7 +49,7 @@ lint = ["ruff>=0.13.1,<0.14.0"] dev = ["langchain-core"] test_integration = [ "httpx>=0.27.0,<1.0.0", - "pillow>=10.3.0,<11.0.0", + "pillow>=10.3.0,<12.0.0", "numpy>=1.26.4; python_version < '3.13'", "numpy>=2.1.0; python_version >= '3.13'", ] @@ -60,6 +62,7 @@ typing = [ [tool.uv.sources] langchain-core = { path = "../../core", editable = true } langchain-tests = { path = "../../standard-tests", editable = true } +langchain = { path = "../../langchain_v1", editable = true } [tool.mypy] disallow_untyped_defs = "True" diff --git a/libs/partners/openai/tests/cassettes/test_incomplete_response.yaml.gz b/libs/partners/openai/tests/cassettes/test_incomplete_response.yaml.gz new file mode 100644 index 00000000000..f9ac5b9da95 Binary files /dev/null and b/libs/partners/openai/tests/cassettes/test_incomplete_response.yaml.gz differ diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py index da245c1c025..e34cf1d207e 100644 --- a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py +++ b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py @@ -116,7 +116,7 @@ def test_chat_openai_streaming_generation_info() -> None: chat = _get_llm(max_tokens=2, temperature=0, callbacks=callback_manager) list(chat.stream("hi")) generation = callback.saved_things["generation"] - # `Hello!` is two tokens, assert that that is what is returned + # `Hello!` is two tokens, assert that is what is returned assert generation.generations[0][0].text == "Hello!" 
diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py index 3aad2f0988d..01214fff790 100644 --- a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py +++ b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py @@ -2,6 +2,7 @@ import base64 import json +import os from collections.abc import AsyncIterator from pathlib import Path from textwrap import dedent @@ -64,6 +65,57 @@ def test_chat_openai_model() -> None: assert chat.model_name == "bar" +def test_callable_api_key(monkeypatch: pytest.MonkeyPatch) -> None: + original_key = os.environ["OPENAI_API_KEY"] + + calls = {"sync": 0} + + def get_openai_api_key() -> str: + calls["sync"] += 1 + return original_key + + monkeypatch.delenv("OPENAI_API_KEY") + + model = ChatOpenAI(model="gpt-4.1-mini", api_key=get_openai_api_key) + response = model.invoke("hello") + assert isinstance(response, AIMessage) + assert calls["sync"] == 1 + + +async def test_callable_api_key_async(monkeypatch: pytest.MonkeyPatch) -> None: + original_key = os.environ["OPENAI_API_KEY"] + + calls = {"sync": 0, "async": 0} + + def get_openai_api_key() -> str: + calls["sync"] += 1 + return original_key + + async def get_openai_api_key_async() -> str: + calls["async"] += 1 + return original_key + + monkeypatch.delenv("OPENAI_API_KEY") + + model = ChatOpenAI(model="gpt-4.1-mini", api_key=get_openai_api_key) + response = model.invoke("hello") + assert isinstance(response, AIMessage) + assert calls["sync"] == 1 + + response = await model.ainvoke("hello") + assert isinstance(response, AIMessage) + assert calls["sync"] == 2 + + model = ChatOpenAI(model="gpt-4.1-mini", api_key=get_openai_api_key_async) + async_response = await model.ainvoke("hello") + assert isinstance(async_response, AIMessage) + assert calls["async"] == 1 + + with pytest.raises(ValueError): + # We do not create a sync callable from an async one + _ = model.invoke("hello") + + @pytest.mark.parametrize("use_responses_api", [False, True]) def test_chat_openai_system_message(use_responses_api: bool) -> None: """Test ChatOpenAI wrapper with system message.""" @@ -141,7 +193,7 @@ def test_chat_openai_streaming_generation_info() -> None: chat = ChatOpenAI(max_tokens=2, temperature=0, callbacks=callback_manager) # type: ignore[call-arg] list(chat.stream("hi")) generation = callback.saved_things["generation"] - # `Hello!` is two tokens, assert that that is what is returned + # `Hello!` is two tokens, assert that is what is returned assert generation.generations[0][0].text == "Hello!" @@ -194,13 +246,35 @@ def test_openai_invoke() -> None: result = llm.invoke("Hello", config={"tags": ["foo"]}) assert isinstance(result.content, str) + usage_metadata = result.usage_metadata # type: ignore[attr-defined] + # assert no response headers if include_response_headers is not set assert "headers" not in result.response_metadata + assert usage_metadata is not None + flex_input = usage_metadata.get("input_token_details", {}).get("flex") + assert isinstance(flex_input, int) + assert flex_input > 0 + assert flex_input == usage_metadata.get("input_tokens") + flex_output = usage_metadata.get("output_token_details", {}).get("flex") + assert isinstance(flex_output, int) + assert flex_output > 0 + # GPT-5-nano/reasoning model specific. Remove if model used in test changes. 
+ flex_reasoning = usage_metadata.get("output_token_details", {}).get( + "flex_reasoning" + ) + assert isinstance(flex_reasoning, int) + assert flex_reasoning > 0 + assert flex_reasoning + flex_output == usage_metadata.get("output_tokens") +@pytest.mark.flaky(retries=3, delay=1) def test_stream() -> None: """Test streaming tokens from OpenAI.""" - llm = ChatOpenAI(model="gpt-4.1-mini") + llm = ChatOpenAI( + model="gpt-5-nano", + service_tier="flex", # Also test service_tier + max_retries=3, # Add retries for 503 capacity errors + ) full: BaseMessageChunk | None = None for chunk in llm.stream("I'm Pickle Rick"): @@ -236,6 +310,19 @@ def test_stream() -> None: assert aggregate.usage_metadata["input_tokens"] > 0 assert aggregate.usage_metadata["output_tokens"] > 0 assert aggregate.usage_metadata["total_tokens"] > 0 + assert aggregate.usage_metadata.get("input_token_details", {}).get("flex", 0) > 0 # type: ignore[operator] + assert aggregate.usage_metadata.get("output_token_details", {}).get("flex", 0) > 0 # type: ignore[operator] + assert ( + aggregate.usage_metadata.get("output_token_details", {}).get( # type: ignore[operator] + "flex_reasoning", 0 + ) + > 0 + ) + assert aggregate.usage_metadata.get("output_token_details", {}).get( # type: ignore[operator] + "flex_reasoning", 0 + ) + aggregate.usage_metadata.get("output_token_details", {}).get( + "flex", 0 + ) == aggregate.usage_metadata.get("output_tokens") async def test_astream() -> None: @@ -308,6 +395,28 @@ async def test_astream() -> None: await _test_stream(llm.astream("Hello", stream_usage=False), expect_usage=False) +@pytest.mark.parametrize("streaming", [False, True]) +def test_flex_usage_responses(streaming: bool) -> None: + llm = ChatOpenAI( + model="gpt-5-nano", + service_tier="flex", + max_retries=3, + use_responses_api=True, + streaming=streaming, + ) + result = llm.invoke("Hello") + assert result.usage_metadata + flex_input = result.usage_metadata.get("input_token_details", {}).get("flex") + flex_output = result.usage_metadata.get("output_token_details", {}).get("flex") + flex_reasoning = result.usage_metadata.get("output_token_details", {}).get( + "flex_reasoning" + ) + assert isinstance(flex_input, int) + assert isinstance(flex_output, int) + assert isinstance(flex_reasoning, int) + assert flex_output + flex_reasoning == result.usage_metadata.get("output_tokens") + + async def test_abatch_tags() -> None: """Test batch tokens from ChatOpenAI.""" llm = ChatOpenAI() @@ -600,7 +709,7 @@ async def test_openai_response_headers_async(use_responses_api: bool) -> None: def test_image_token_counting_jpeg() -> None: model = ChatOpenAI(model="gpt-4o", temperature=0) - image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + image_url = "https://raw.githubusercontent.com/langchain-ai/docs/9f99bb977307a1bd5efeb8dc6b67eb13904c4af1/src/oss/images/checkpoints.jpg" message = HumanMessage( content=[ {"type": "text", "text": "describe the weather in this image"}, @@ -632,7 +741,7 @@ def test_image_token_counting_jpeg() -> None: def test_image_token_counting_png() -> None: model = ChatOpenAI(model="gpt-4o", temperature=0) - image_url = "https://upload.wikimedia.org/wikipedia/commons/4/47/PNG_transparency_demonstration_1.png" + image_url = "https://raw.githubusercontent.com/langchain-ai/docs/4d11d08b6b0e210bd456943f7a22febbd168b543/src/images/agentic-rag-output.png" message = HumanMessage( content=[ {"type": "text", "text": "how 
many dice are in this image"}, @@ -894,6 +1003,7 @@ def test_audio_input_modality() -> None: assert "audio" in output.additional_kwargs +@pytest.mark.flaky(retries=3, delay=1) def test_prediction_tokens() -> None: code = dedent( """ diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_base_standard.py b/libs/partners/openai/tests/integration_tests/chat_models/test_base_standard.py index 302b8ab0327..c49d5a40e1f 100644 --- a/libs/partners/openai/tests/integration_tests/chat_models/test_base_standard.py +++ b/libs/partners/openai/tests/integration_tests/chat_models/test_base_standard.py @@ -75,7 +75,7 @@ class TestOpenAIStandard(ChatModelIntegrationTests): return _invoke(llm, input_, stream) def invoke_with_reasoning_output(self, *, stream: bool = False) -> AIMessage: - llm = ChatOpenAI(model="o1-mini", stream_usage=True, temperature=1) + llm = ChatOpenAI(model="gpt-5-nano", reasoning_effort="medium") input_ = ( "explain the relationship between the 2008/9 economic crisis and the " "startup ecosystem in the early 2010s" @@ -88,6 +88,7 @@ class TestOpenAIStandard(ChatModelIntegrationTests): # For now, we test with filename in OpenAI-specific tests return False + @pytest.mark.flaky(retries=3, delay=1) def test_openai_pdf_inputs(self, model: BaseChatModel) -> None: """Test that the model can process PDF inputs.""" url = "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf" diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py b/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py index 8310adc6ed8..db18dcf046f 100644 --- a/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py +++ b/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py @@ -18,6 +18,7 @@ from pydantic import BaseModel from typing_extensions import TypedDict from langchain_openai import ChatOpenAI, custom_tool +from langchain_openai.chat_models.base import _convert_to_openai_response_format MODEL_NAME = "gpt-4o-mini" @@ -57,6 +58,26 @@ def _check_response(response: BaseMessage | None) -> None: assert response.response_metadata["service_tier"] # type: ignore[typeddict-item] +@pytest.mark.vcr +def test_incomplete_response() -> None: + model = ChatOpenAI( + model=MODEL_NAME, use_responses_api=True, max_completion_tokens=16 + ) + response = model.invoke("Tell me a 100 word story about a bear.") + assert response.response_metadata["incomplete_details"] + assert response.response_metadata["incomplete_details"]["reason"] + assert response.response_metadata["status"] == "incomplete" + + full: AIMessageChunk | None = None + for chunk in model.stream("Tell me a 100 word story about a bear."): + assert isinstance(chunk, AIMessageChunk) + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + assert full.response_metadata["incomplete_details"] + assert full.response_metadata["incomplete_details"]["reason"] + assert full.response_metadata["status"] == "incomplete" + + @pytest.mark.default_cassette("test_web_search.yaml.gz") @pytest.mark.vcr @pytest.mark.parametrize("output_version", ["responses/v1", "v1"]) @@ -247,24 +268,30 @@ def test_parsed_dict_schema(schema: Any) -> None: def test_parsed_strict() -> None: llm = ChatOpenAI(model=MODEL_NAME, use_responses_api=True) - class InvalidJoke(TypedDict): + class Joke(TypedDict): setup: Annotated[str, ..., "The setup of the joke"] punchline: Annotated[str, None, "The punchline of the joke"] + schema = 
_convert_to_openai_response_format(Joke)
+    invalid_schema = cast(dict, _convert_to_openai_response_format(Joke, strict=True))
+    invalid_schema["json_schema"]["schema"]["required"] = ["setup"]  # make invalid
+
     # Test not strict
-    response = llm.invoke("Tell me a joke", response_format=InvalidJoke)
+    response = llm.invoke("Tell me a joke", response_format=schema)
     parsed = json.loads(response.text)
     assert parsed == response.additional_kwargs["parsed"]

     # Test strict
     with pytest.raises(openai.BadRequestError):
         llm.invoke(
-            "Tell me a joke about cats.", response_format=InvalidJoke, strict=True
+            "Tell me a joke about cats.", response_format=invalid_schema, strict=True
         )
     with pytest.raises(openai.BadRequestError):
         next(
             llm.stream(
-                "Tell me a joke about cats.", response_format=InvalidJoke, strict=True
+                "Tell me a joke about cats.",
+                response_format=invalid_schema,
+                strict=True,
             )
         )

@@ -380,6 +407,10 @@ def test_computer_calls() -> None:
 def test_file_search(
     output_version: Literal["responses/v1", "v1"],
 ) -> None:
+    vector_store_id = os.getenv("OPENAI_VECTOR_STORE_ID")
+    if not vector_store_id:
+        pytest.skip("OPENAI_VECTOR_STORE_ID is not set")
+
     llm = ChatOpenAI(
         model=MODEL_NAME,
         use_responses_api=True,
@@ -387,7 +418,7 @@ def test_file_search(
     )
     tool = {
         "type": "file_search",
-        "vector_store_ids": [os.environ["OPENAI_VECTOR_STORE_ID"]],
+        "vector_store_ids": [vector_store_id],
     }
     input_message = {"role": "user", "content": "What is deep research by OpenAI?"}
diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_responses_standard.py b/libs/partners/openai/tests/integration_tests/chat_models/test_responses_standard.py
index 565b88fa95a..532f0717901 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_responses_standard.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_responses_standard.py
@@ -54,6 +54,7 @@ class TestOpenAIResponses(TestOpenAIStandard):
         input_ = "What was the 3rd highest building in 2000?"
return _invoke(llm, input_, stream) + @pytest.mark.flaky(retries=3, delay=1) def test_openai_pdf_inputs(self, model: BaseChatModel) -> None: """Test that the model can process PDF inputs.""" super().test_openai_pdf_inputs(model) diff --git a/libs/partners/openai/tests/integration_tests/embeddings/test_base.py b/libs/partners/openai/tests/integration_tests/embeddings/test_base.py index 321edcfc0fb..95a03859455 100644 --- a/libs/partners/openai/tests/integration_tests/embeddings/test_base.py +++ b/libs/partners/openai/tests/integration_tests/embeddings/test_base.py @@ -1,7 +1,10 @@ """Test OpenAI embeddings.""" +import os + import numpy as np import openai +import pytest from langchain_openai.embeddings.base import OpenAIEmbeddings @@ -67,3 +70,56 @@ def test_langchain_openai_embeddings_dimensions_large_num() -> None: output = embedding.embed_documents(documents) assert len(output) == 2000 assert len(output[0]) == 128 + + +def test_callable_api_key(monkeypatch: pytest.MonkeyPatch) -> None: + original_key = os.environ["OPENAI_API_KEY"] + + calls = {"sync": 0} + + def get_openai_api_key() -> str: + calls["sync"] += 1 + return original_key + + monkeypatch.delenv("OPENAI_API_KEY") + + model = OpenAIEmbeddings( + model="text-embedding-3-small", dimensions=128, api_key=get_openai_api_key + ) + _ = model.embed_query("hello") + assert calls["sync"] == 1 + + +async def test_callable_api_key_async(monkeypatch: pytest.MonkeyPatch) -> None: + original_key = os.environ["OPENAI_API_KEY"] + + calls = {"sync": 0, "async": 0} + + def get_openai_api_key() -> str: + calls["sync"] += 1 + return original_key + + async def get_openai_api_key_async() -> str: + calls["async"] += 1 + return original_key + + monkeypatch.delenv("OPENAI_API_KEY") + + model = OpenAIEmbeddings( + model="text-embedding-3-small", dimensions=128, api_key=get_openai_api_key + ) + _ = model.embed_query("hello") + assert calls["sync"] == 1 + + _ = await model.aembed_query("hello") + assert calls["sync"] == 2 + + model = OpenAIEmbeddings( + model="text-embedding-3-small", dimensions=128, api_key=get_openai_api_key_async + ) + _ = await model.aembed_query("hello") + assert calls["async"] == 1 + + with pytest.raises(ValueError): + # We do not create a sync callable from an async one + _ = model.embed_query("hello") diff --git a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py index 8f9b16a0701..9dd15931c4d 100644 --- a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py +++ b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py @@ -3,6 +3,7 @@ from __future__ import annotations import json +import warnings from functools import partial from types import TracebackType from typing import Any, Literal, cast @@ -257,8 +258,8 @@ def test__convert_dict_to_message_tool_call() -> None: error=( "Function GenerateUsername arguments:\n\noops\n\nare not " "valid JSON. 
Received JSONDecodeError Expecting value: line 1 "
-                    "column 1 (char 0)\nFor troubleshooting, visit: https://python"
-                    ".langchain.com/docs/troubleshooting/errors/OUTPUT_PARSING_FAILURE "
+                    "column 1 (char 0)\nFor troubleshooting, visit: https://docs"
+                    ".langchain.com/oss/python/langchain/errors/OUTPUT_PARSING_FAILURE "
                 ),
                 type="invalid_tool_call",
             )
@@ -1199,14 +1200,18 @@ def test__get_request_payload() -> None:


 def test_init_o1() -> None:
-    with pytest.warns(None) as record:  # type: ignore[call-overload]
+    with warnings.catch_warnings(record=True) as record:
+        warnings.simplefilter("error")  # Treat warnings as errors
         ChatOpenAI(model="o1", reasoning_effort="medium")
+    assert len(record) == 0


 def test_init_minimal_reasoning_effort() -> None:
-    with pytest.warns(None) as record:  # type: ignore[call-overload]
+    with warnings.catch_warnings(record=True) as record:
+        warnings.simplefilter("error")
         ChatOpenAI(model="gpt-5", reasoning_effort="minimal")
+    assert len(record) == 0
diff --git a/libs/partners/openai/tests/unit_tests/llms/test_base.py b/libs/partners/openai/tests/unit_tests/llms/test_base.py
index f0b027f9b7b..bae96ab7ca3 100644
--- a/libs/partners/openai/tests/unit_tests/llms/test_base.py
+++ b/libs/partners/openai/tests/unit_tests/llms/test_base.py
@@ -108,3 +108,13 @@ def test_stream_response_to_generation_chunk() -> None:
     assert chunk == GenerationChunk(
         text="", generation_info={"finish_reason": None, "logprobs": None}
     )
+
+
+def test_generate_streaming_multiple_prompts_error() -> None:
+    """Ensure a ValueError is raised when streaming=True with multiple prompts."""
+    llm = OpenAI(streaming=True)
+
+    with pytest.raises(
+        ValueError, match="Cannot stream results with multiple prompts\\."
+    ):
+        llm._generate(["foo", "bar"])
diff --git a/libs/partners/openai/tests/unit_tests/middleware/__init__.py b/libs/partners/openai/tests/unit_tests/middleware/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/libs/partners/openai/tests/unit_tests/middleware/test_openai_moderation_middleware.py b/libs/partners/openai/tests/unit_tests/middleware/test_openai_moderation_middleware.py
new file mode 100644
index 00000000000..ee4f05b264b
--- /dev/null
+++ b/libs/partners/openai/tests/unit_tests/middleware/test_openai_moderation_middleware.py
@@ -0,0 +1,195 @@
+from __future__ import annotations
+
+from collections.abc import Mapping
+from copy import deepcopy
+from typing import Any, cast
+from unittest.mock import Mock
+
+import pytest
+from langchain.agents.middleware.types import AgentState
+from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
+from openai.types.moderation import Moderation
+
+from langchain_openai.middleware.openai_moderation import (
+    OpenAIModerationError,
+    OpenAIModerationMiddleware,
+)
+
+DEFAULT_OK_DATA: dict[str, Any] = {
+    "flagged": False,
+    "categories": {
+        "harassment": False,
+        "harassment/threatening": False,
+        "hate": False,
+        "hate/threatening": False,
+        "illicit": False,
+        "illicit/violent": False,
+        "self-harm": False,
+        "self-harm/instructions": False,
+        "self-harm/intent": False,
+        "sexual": False,
+        "sexual/minors": False,
+        "violence": False,
+        "violence/graphic": False,
+    },
+    "category_scores": {
+        "harassment": 0.0,
+        "harassment/threatening": 0.0,
+        "hate": 0.0,
+        "hate/threatening": 0.0,
+        "illicit": 0.0,
+        "illicit/violent": 0.0,
+        "self-harm": 0.0,
+        "self-harm/instructions": 0.0,
+        "self-harm/intent": 0.0,
+        "sexual": 0.0,
+        "sexual/minors": 0.0,
+        "violence": 0.0,
+        "violence/graphic": 0.0,
+    },
+    
"category_applied_input_types": { + "harassment": ["text"], + "harassment/threatening": ["text"], + "hate": ["text"], + "hate/threatening": ["text"], + "illicit": ["text"], + "illicit/violent": ["text"], + "self-harm": ["text"], + "self-harm/instructions": ["text"], + "self-harm/intent": ["text"], + "sexual": ["text"], + "sexual/minors": ["text"], + "violence": ["text"], + "violence/graphic": ["text"], + }, +} + +DEFAULT_OK = Moderation.model_validate(DEFAULT_OK_DATA) + + +def flagged_result() -> Moderation: + flagged_data = deepcopy(DEFAULT_OK_DATA) + flagged_data["flagged"] = True + flagged_data["categories"]["self-harm"] = True + flagged_data["category_scores"]["self-harm"] = 0.9 + return Moderation.model_validate(flagged_data) + + +class StubModerationMiddleware(OpenAIModerationMiddleware): + """Override OpenAI calls with deterministic fixtures.""" + + def __init__(self, decisions: Mapping[str, Moderation], **kwargs: Any) -> None: + super().__init__(**kwargs) + self._decisions = decisions + + def _moderate(self, text: str) -> Moderation: + return self._decisions.get(text, DEFAULT_OK) + + async def _amoderate(self, text: str) -> Moderation: + return self._moderate(text) + + +def make_state( + messages: list[AIMessage | HumanMessage | ToolMessage], +) -> AgentState[Any]: + return cast(AgentState[Any], {"messages": messages}) + + +def test_before_model_allows_clean_input() -> None: + middleware = StubModerationMiddleware({}, model="test") + state = make_state([HumanMessage(content="hello")]) + + assert middleware.before_model(state, Mock()) is None + + +def test_before_model_errors_on_flagged_input() -> None: + middleware = StubModerationMiddleware( + {"bad": flagged_result()}, model="test", exit_behavior="error" + ) + state = make_state([HumanMessage(content="bad")]) + + with pytest.raises(OpenAIModerationError) as exc: + middleware.before_model(state, Mock()) + + assert exc.value.result.flagged is True + assert exc.value.stage == "input" + + +def test_before_model_jump_on_end_behavior() -> None: + middleware = StubModerationMiddleware( + {"bad": flagged_result()}, model="test", exit_behavior="end" + ) + state = make_state([HumanMessage(content="bad")]) + + response = middleware.before_model(state, Mock()) + + assert response is not None + assert response["jump_to"] == "end" + ai_message = response["messages"][0] + assert isinstance(ai_message, AIMessage) + assert "flagged" in ai_message.content + + +def test_custom_violation_message_template() -> None: + middleware = StubModerationMiddleware( + {"bad": flagged_result()}, + model="test", + exit_behavior="end", + violation_message="Policy block: {categories}", + ) + state = make_state([HumanMessage(content="bad")]) + + response = middleware.before_model(state, Mock()) + + assert response is not None + assert response["messages"][0].content == "Policy block: self harm" + + +def test_after_model_replaces_flagged_message() -> None: + middleware = StubModerationMiddleware( + {"unsafe": flagged_result()}, model="test", exit_behavior="replace" + ) + state = make_state([AIMessage(content="unsafe", id="ai-1")]) + + response = middleware.after_model(state, Mock()) + assert response is not None + updated_messages = response["messages"] + assert isinstance(updated_messages[-1], AIMessage) + assert updated_messages[-1].id == "ai-1" + assert "flagged" in updated_messages[-1].content + + +def test_tool_messages_are_moderated_when_enabled() -> None: + middleware = StubModerationMiddleware( + {"dangerous": flagged_result()}, + model="test", + 
check_tool_results=True, + exit_behavior="replace", + ) + state = make_state( + [ + HumanMessage(content="question"), + AIMessage(content="call tool"), + ToolMessage(content="dangerous", tool_call_id="tool-1"), + ] + ) + + response = middleware.before_model(state, Mock()) + assert response is not None + updated_messages = response["messages"] + tool_message = updated_messages[-1] + assert isinstance(tool_message, ToolMessage) + assert tool_message.tool_call_id == "tool-1" + assert "flagged" in tool_message.content + + +@pytest.mark.asyncio +async def test_async_before_model_uses_async_moderation() -> None: + middleware = StubModerationMiddleware( + {"async": flagged_result()}, model="test", exit_behavior="end" + ) + state = make_state([HumanMessage(content="async")]) + + response = await middleware.abefore_model(state, Mock()) + assert response is not None + assert response["jump_to"] == "end" diff --git a/libs/partners/openai/tests/unit_tests/test_secrets.py b/libs/partners/openai/tests/unit_tests/test_secrets.py index aa1484058e0..27d69bed92c 100644 --- a/libs/partners/openai/tests/unit_tests/test_secrets.py +++ b/libs/partners/openai/tests/unit_tests/test_secrets.py @@ -187,6 +187,18 @@ def test_openai_uses_actual_secret_value_from_secretstr(model_class: type) -> No assert cast(SecretStr, model.openai_api_key).get_secret_value() == "secret-api-key" +@pytest.mark.parametrize("model_class", [ChatOpenAI, OpenAI, OpenAIEmbeddings]) +def test_openai_api_key_accepts_callable(model_class: type) -> None: + """Test that the API key can be passed as a callable.""" + + def get_api_key() -> str: + return "secret-api-key-from-callable" + + model = model_class(openai_api_key=get_api_key) + assert callable(model.openai_api_key) + assert model.openai_api_key() == "secret-api-key-from-callable" + + @pytest.mark.parametrize("model_class", [AzureChatOpenAI, AzureOpenAI]) def test_azure_serialized_secrets(model_class: type) -> None: """Test that the actual secret value is correctly retrieved.""" diff --git a/libs/partners/openai/uv.lock b/libs/partners/openai/uv.lock index 510f038545a..06f807d0c70 100644 --- a/libs/partners/openai/uv.lock +++ b/libs/partners/openai/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10.0, <4.0.0" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -36,11 +36,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.8.3" +version = "2025.10.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] [[package]] @@ -127,66 +127,91 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.3" +version = "3.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, - { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, - { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, - { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, - { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, - { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, - { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, - { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, - { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, - { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, - { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, - { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, - { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, - { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, - { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, - { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, - { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, - { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, - { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, - { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, - { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, - { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, - { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, - { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, - { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, - { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, - { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, - { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, - { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, - { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, - { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, - { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, - { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, - { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, - { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, - { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, - { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, - { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, - { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, - { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, - { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, - { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, - { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, - { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, - { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, + { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, + { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = 
"2025-10-14T04:40:26.806Z" }, + { url = "https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 107198, upload-time = "2025-10-14T04:40:30.644Z" }, + { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = 
"2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] [[package]] @@ -200,101 +225,101 @@ wheels = [ [[package]] name = "coverage" -version = "7.10.7" +version = "7.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, - { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = 
"2025-09-21T20:01:00.081Z" }, - { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, - { url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, - { url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, - { url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, - { url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, - { url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, - { url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = "2025-09-21T20:01:13.459Z" }, - { url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, - { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 
218102, upload-time = "2025-09-21T20:01:16.089Z" }, - { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, - { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, - { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, - { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, - { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, - { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, - { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, - { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, - { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = 
"sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, - { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, - { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, - { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, - { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, - { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, - { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, - { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, - { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, - { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, - { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, - { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, - { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, - { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, - { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, - { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, - { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, - { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, - { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, - { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, - { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, - { 
url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, - { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, - { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, - { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, - { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, - { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, - { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, - { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, - { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, - { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, - { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, - { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, - { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, - { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, - { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, - { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, - { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, - { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, - { url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, - { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, - { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, - { url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = "2025-09-21T20:02:51.971Z" }, - { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, - { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, - { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, - { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, - { url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, - { url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, - { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, - { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, - { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, - { url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, - { url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, - { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, - { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, - { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, - { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = 
"2025-09-21T20:03:26.93Z" }, - { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, - { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, + { url = "https://files.pythonhosted.org/packages/12/95/c49df0aceb5507a80b9fe5172d3d39bf23f05be40c23c8d77d556df96cec/coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31", size = 215800, upload-time = "2025-10-15T15:12:19.824Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c6/7bb46ce01ed634fff1d7bb53a54049f539971862cc388b304ff3c51b4f66/coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075", size = 216198, upload-time = "2025-10-15T15:12:22.549Z" }, + { url = "https://files.pythonhosted.org/packages/94/b2/75d9d8fbf2900268aca5de29cd0a0fe671b0f69ef88be16767cc3c828b85/coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab", size = 242953, upload-time = "2025-10-15T15:12:24.139Z" }, + { url = "https://files.pythonhosted.org/packages/65/ac/acaa984c18f440170525a8743eb4b6c960ace2dbad80dc22056a437fc3c6/coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0", size = 244766, upload-time = "2025-10-15T15:12:25.974Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0d/938d0bff76dfa4a6b228c3fc4b3e1c0e2ad4aa6200c141fcda2bd1170227/coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785", size = 246625, upload-time = "2025-10-15T15:12:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/38/54/8f5f5e84bfa268df98f46b2cb396b1009734cfb1e5d6adb663d284893b32/coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591", size = 243568, upload-time = "2025-10-15T15:12:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/68/30/8ba337c2877fe3f2e1af0ed7ff4be0c0c4aca44d6f4007040f3ca2255e99/coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088", size = 244665, upload-time = "2025-10-15T15:12:30.297Z" }, + { url = "https://files.pythonhosted.org/packages/cc/fb/c6f1d6d9a665536b7dde2333346f0cc41dc6a60bd1ffc10cd5c33e7eb000/coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f", size = 242681, upload-time = "2025-10-15T15:12:32.326Z" }, + { url = "https://files.pythonhosted.org/packages/be/38/1b532319af5f991fa153c20373291dc65c2bf532af7dbcffdeef745c8f79/coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866", size = 242912, 
upload-time = "2025-10-15T15:12:34.079Z" }, + { url = "https://files.pythonhosted.org/packages/67/3d/f39331c60ef6050d2a861dc1b514fa78f85f792820b68e8c04196ad733d6/coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841", size = 243559, upload-time = "2025-10-15T15:12:35.809Z" }, + { url = "https://files.pythonhosted.org/packages/4b/55/cb7c9df9d0495036ce582a8a2958d50c23cd73f84a23284bc23bd4711a6f/coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf", size = 218266, upload-time = "2025-10-15T15:12:37.429Z" }, + { url = "https://files.pythonhosted.org/packages/68/a8/b79cb275fa7bd0208767f89d57a1b5f6ba830813875738599741b97c2e04/coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969", size = 219169, upload-time = "2025-10-15T15:12:39.25Z" }, + { url = "https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" }, + { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" }, + { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" }, + { url = "https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" }, + { url = "https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" }, + { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = "2025-10-15T15:12:48.904Z" }, + { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" }, + { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", 
size = 246490, upload-time = "2025-10-15T15:12:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" }, + { url = "https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" }, + { url = "https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" }, + { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" }, + { url = "https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" }, + { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" }, + { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" }, + { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" }, + { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" }, + { url = "https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" }, + { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" }, + { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" }, + { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" }, + { url = "https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" }, + { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" }, + { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" }, + { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" }, + { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" }, + { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" }, + { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" }, + { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" }, + { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" }, + { url = "https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" }, + { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" }, + { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" }, + { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" }, + { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" }, + { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" }, + { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" }, + { url = "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" }, + { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" }, + { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" }, + { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" }, + { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" }, + { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" }, + { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = "2025-10-15T15:14:28.42Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = 
"2025-10-15T15:14:32.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" }, + { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" }, + { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" }, + { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" }, + { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" }, + { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = "2025-10-15T15:14:52.413Z" }, + { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, 
upload-time = "2025-10-15T15:14:54.268Z" }, + { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" }, + { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" }, ] [package.optional-dependencies] @@ -383,11 +408,11 @@ wheels = [ [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] @@ -401,75 +426,99 @@ wheels = [ [[package]] name = "jiter" -version = "0.11.0" +version = "0.11.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = 
"sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/68/0357982493a7b20925aece061f7fb7a2678e3b232f8d73a6edb7e5304443/jiter-0.11.1.tar.gz", hash = "sha256:849dcfc76481c0ea0099391235b7ca97d7279e0fa4c86005457ac7c88e8b76dc", size = 168385, upload-time = "2025-10-17T11:31:15.186Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/21/7dd1235a19e26979be6098e87e4cced2e061752f3a40a17bbce6dea7fae1/jiter-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3893ce831e1c0094a83eeaf56c635a167d6fa8cc14393cc14298fd6fdc2a2449", size = 309875, upload-time = "2025-09-15T09:18:48.41Z" }, - { url = "https://files.pythonhosted.org/packages/71/f9/462b54708aa85b135733ccba70529dd68a18511bf367a87c5fd28676c841/jiter-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:25c625b9b61b5a8725267fdf867ef2e51b429687f6a4eef211f4612e95607179", size = 316505, upload-time = "2025-09-15T09:18:51.057Z" }, - { url = "https://files.pythonhosted.org/packages/bd/40/14e2eeaac6a47bff27d213834795472355fd39769272eb53cb7aa83d5aa8/jiter-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd4ca85fb6a62cf72e1c7f5e34ddef1b660ce4ed0886ec94a1ef9777d35eaa1f", size = 337613, upload-time = "2025-09-15T09:18:52.358Z" }, - { url = "https://files.pythonhosted.org/packages/d3/ed/a5f1f8419c92b150a7c7fb5ccba1fb1e192887ad713d780e70874f0ce996/jiter-0.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:572208127034725e79c28437b82414028c3562335f2b4f451d98136d0fc5f9cd", size = 361438, upload-time = "2025-09-15T09:18:54.637Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f5/70682c023dfcdd463a53faf5d30205a7d99c51d70d3e303c932d0936e5a2/jiter-0.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494ba627c7f550ad3dabb21862864b8f2216098dc18ff62f37b37796f2f7c325", size = 486180, upload-time = "2025-09-15T09:18:56.158Z" }, - { url = "https://files.pythonhosted.org/packages/7c/39/020d08cbab4eab48142ad88b837c41eb08a15c0767fdb7c0d3265128a44b/jiter-0.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8da18a99f58bca3ecc2d2bba99cac000a924e115b6c4f0a2b98f752b6fbf39a", size = 376681, upload-time = "2025-09-15T09:18:57.553Z" }, - { url = "https://files.pythonhosted.org/packages/52/10/b86733f6e594cf51dd142f37c602d8df87c554c5844958deaab0de30eb5d/jiter-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ffd3b0fff3fabbb02cc09910c08144db6bb5697a98d227a074401e01ee63dd", size = 348685, upload-time = "2025-09-15T09:18:59.208Z" }, - { url = "https://files.pythonhosted.org/packages/fb/ee/8861665e83a9e703aa5f65fddddb6225428e163e6b0baa95a7f9a8fb9aae/jiter-0.11.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fe6530aa738a4f7d4e4702aa8f9581425d04036a5f9e25af65ebe1f708f23be", size = 385573, upload-time = "2025-09-15T09:19:00.593Z" }, - { url = "https://files.pythonhosted.org/packages/25/74/05afec03600951f128293813b5a208c9ba1bf587c57a344c05a42a69e1b1/jiter-0.11.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e35d66681c133a03d7e974e7eedae89720fe8ca3bd09f01a4909b86a8adf31f5", size = 516669, upload-time = "2025-09-15T09:19:02.369Z" }, - { url = "https://files.pythonhosted.org/packages/93/d1/2e5bfe147cfbc2a5eef7f73eb75dc5c6669da4fa10fc7937181d93af9495/jiter-0.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:c59459beca2fbc9718b6f1acb7bfb59ebc3eb4294fa4d40e9cb679dafdcc6c60", size = 508767, upload-time = "2025-09-15T09:19:04.011Z" }, - { url = "https://files.pythonhosted.org/packages/87/50/597f71307e10426b5c082fd05d38c615ddbdd08c3348d8502963307f0652/jiter-0.11.0-cp310-cp310-win32.whl", hash = "sha256:b7b0178417b0dcfc5f259edbc6db2b1f5896093ed9035ee7bab0f2be8854726d", size = 205476, upload-time = "2025-09-15T09:19:05.594Z" }, - { url = "https://files.pythonhosted.org/packages/c7/86/1e5214b3272e311754da26e63edec93a183811d4fc2e0118addec365df8b/jiter-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:11df2bf99fb4754abddd7f5d940a48e51f9d11624d6313ca4314145fcad347f0", size = 204708, upload-time = "2025-09-15T09:19:06.955Z" }, - { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, - { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, - { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, - { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, - { url = "https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, - { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, - { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, - { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, - { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, - { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, - { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, - { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, - { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, - { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, - { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, - { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, - { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, - { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, - { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, - { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, - { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, - { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, - { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, - { url = "https://files.pythonhosted.org/packages/97/c4/d530e514d0f4f29b2b68145e7b389cbc7cac7f9c8c23df43b04d3d10fa3e/jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb", size = 305021, upload-time = "2025-09-15T09:19:43.523Z" }, - { url = "https://files.pythonhosted.org/packages/7a/77/796a19c567c5734cbfc736a6f987affc0d5f240af8e12063c0fb93990ffa/jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471", size = 314384, upload-time = "2025-09-15T09:19:44.849Z" }, - { url = "https://files.pythonhosted.org/packages/14/9c/824334de0b037b91b6f3fa9fe5a191c83977c7ec4abe17795d3cb6d174cf/jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd", size = 337389, upload-time = "2025-09-15T09:19:46.094Z" }, - { url = "https://files.pythonhosted.org/packages/a2/95/ed4feab69e6cf9b2176ea29d4ef9d01a01db210a3a2c8a31a44ecdc68c38/jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921", size = 360519, upload-time = "2025-09-15T09:19:47.494Z" }, - { url = "https://files.pythonhosted.org/packages/b5/0c/2ad00f38d3e583caba3909d95b7da1c3a7cd82c0aa81ff4317a8016fb581/jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df", size = 487198, upload-time = "2025-09-15T09:19:49.116Z" }, - { url = "https://files.pythonhosted.org/packages/ea/8b/919b64cf3499b79bdfba6036da7b0cac5d62d5c75a28fb45bad7819e22f0/jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982", size = 377835, upload-time = "2025-09-15T09:19:50.468Z" }, - { url = "https://files.pythonhosted.org/packages/29/7f/8ebe15b6e0a8026b0d286c083b553779b4dd63db35b43a3f171b544de91d/jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64", size = 347655, upload-time = "2025-09-15T09:19:51.726Z" }, - { url = "https://files.pythonhosted.org/packages/8e/64/332127cef7e94ac75719dda07b9a472af6158ba819088d87f17f3226a769/jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1", size = 386135, upload-time = "2025-09-15T09:19:53.075Z" }, - { url = "https://files.pythonhosted.org/packages/20/c8/557b63527442f84c14774159948262a9d4fabb0d61166f11568f22fc60d2/jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758", size = 516063, upload-time = "2025-09-15T09:19:54.447Z" }, - { url = "https://files.pythonhosted.org/packages/86/13/4164c819df4a43cdc8047f9a42880f0ceef5afeb22e8b9675c0528ebdccd/jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166", size = 508139, upload-time = "2025-09-15T09:19:55.764Z" }, - { url = "https://files.pythonhosted.org/packages/fa/70/6e06929b401b331d41ddb4afb9f91cd1168218e3371972f0afa51c9f3c31/jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80", size = 206369, upload-time = "2025-09-15T09:19:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/f4/0d/8185b8e15de6dce24f6afae63380e16377dd75686d56007baa4f29723ea1/jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6", size = 202538, upload-time = "2025-09-15T09:19:58.35Z" }, - { url = "https://files.pythonhosted.org/packages/13/3a/d61707803260d59520721fa326babfae25e9573a88d8b7b9cb54c5423a59/jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33", size = 313737, upload-time = "2025-09-15T09:19:59.638Z" }, - { url = "https://files.pythonhosted.org/packages/cd/cc/c9f0eec5d00f2a1da89f6bdfac12b8afdf8d5ad974184863c75060026457/jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03", size = 346183, upload-time = "2025-09-15T09:20:01.442Z" }, - { url = "https://files.pythonhosted.org/packages/a6/87/fc632776344e7aabbab05a95a0075476f418c5d29ab0f2eec672b7a1f0ac/jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba", size = 204225, upload-time = "2025-09-15T09:20:03.102Z" }, - { url = "https://files.pythonhosted.org/packages/ee/3b/e7f45be7d3969bdf2e3cd4b816a7a1d272507cd0edd2d6dc4b07514f2d9a/jiter-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9a6dff27eca70930bdbe4cbb7c1a4ba8526e13b63dc808c0670083d2d51a4a72", size = 304414, upload-time = "2025-09-15T09:20:04.357Z" }, - { url = "https://files.pythonhosted.org/packages/06/32/13e8e0d152631fcc1907ceb4943711471be70496d14888ec6e92034e2caf/jiter-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b1ae2a7593a62132c7d4c2abbee80bbbb94fdc6d157e2c6cc966250c564ef774", size = 314223, upload-time = "2025-09-15T09:20:05.631Z" }, - { url = "https://files.pythonhosted.org/packages/0c/7e/abedd5b5a20ca083f778d96bba0d2366567fcecb0e6e34ff42640d5d7a18/jiter-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b13a431dba4b059e9e43019d3022346d009baf5066c24dcdea321a303cde9f0", size = 337306, upload-time = 
"2025-09-15T09:20:06.917Z" }, - { url = "https://files.pythonhosted.org/packages/ac/e2/30d59bdc1204c86aa975ec72c48c482fee6633120ee9c3ab755e4dfefea8/jiter-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:af62e84ca3889604ebb645df3b0a3f3bcf6b92babbff642bd214616f57abb93a", size = 360565, upload-time = "2025-09-15T09:20:08.283Z" }, - { url = "https://files.pythonhosted.org/packages/fe/88/567288e0d2ed9fa8f7a3b425fdaf2cb82b998633c24fe0d98f5417321aa8/jiter-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f3b32bb723246e6b351aecace52aba78adb8eeb4b2391630322dc30ff6c773", size = 486465, upload-time = "2025-09-15T09:20:09.613Z" }, - { url = "https://files.pythonhosted.org/packages/18/6e/7b72d09273214cadd15970e91dd5ed9634bee605176107db21e1e4205eb1/jiter-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:adcab442f4a099a358a7f562eaa54ed6456fb866e922c6545a717be51dbed7d7", size = 377581, upload-time = "2025-09-15T09:20:10.884Z" }, - { url = "https://files.pythonhosted.org/packages/58/52/4db456319f9d14deed325f70102577492e9d7e87cf7097bda9769a1fcacb/jiter-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9967c2ab338ee2b2c0102fd379ec2693c496abf71ffd47e4d791d1f593b68e2", size = 347102, upload-time = "2025-09-15T09:20:12.175Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b4/433d5703c38b26083aec7a733eb5be96f9c6085d0e270a87ca6482cbf049/jiter-0.11.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e7d0bed3b187af8b47a981d9742ddfc1d9b252a7235471ad6078e7e4e5fe75c2", size = 386477, upload-time = "2025-09-15T09:20:13.428Z" }, - { url = "https://files.pythonhosted.org/packages/c8/7a/a60bfd9c55b55b07c5c441c5085f06420b6d493ce9db28d069cc5b45d9f3/jiter-0.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:f6fe0283e903ebc55f1a6cc569b8c1f3bf4abd026fed85e3ff8598a9e6f982f0", size = 516004, upload-time = "2025-09-15T09:20:14.848Z" }, - { url = "https://files.pythonhosted.org/packages/2e/46/f8363e5ecc179b4ed0ca6cb0a6d3bfc266078578c71ff30642ea2ce2f203/jiter-0.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5821e3d66606b29ae5b497230b304f1376f38137d69e35f8d2bd5f310ff73", size = 507855, upload-time = "2025-09-15T09:20:16.176Z" }, - { url = "https://files.pythonhosted.org/packages/90/33/396083357d51d7ff0f9805852c288af47480d30dd31d8abc74909b020761/jiter-0.11.0-cp314-cp314-win32.whl", hash = "sha256:c2d13ba7567ca8799f17c76ed56b1d49be30df996eb7fa33e46b62800562a5e2", size = 205802, upload-time = "2025-09-15T09:20:17.661Z" }, - { url = "https://files.pythonhosted.org/packages/e7/ab/eb06ca556b2551d41de7d03bf2ee24285fa3d0c58c5f8d95c64c9c3281b1/jiter-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fb4790497369d134a07fc763cc88888c46f734abdd66f9fdf7865038bf3a8f40", size = 313405, upload-time = "2025-09-15T09:20:18.918Z" }, - { url = "https://files.pythonhosted.org/packages/af/22/7ab7b4ec3a1c1f03aef376af11d23b05abcca3fb31fbca1e7557053b1ba2/jiter-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2bbf24f16ba5ad4441a9845e40e4ea0cb9eed00e76ba94050664ef53ef4406", size = 347102, upload-time = "2025-09-15T09:20:20.16Z" }, - { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 
336380, upload-time = "2025-09-15T09:20:36.867Z" }, + { url = "https://files.pythonhosted.org/packages/12/10/d099def5716452c8d5ffa527405373a44ddaf8e3c9d4f6de1e1344cffd90/jiter-0.11.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ed58841a491bbbf3f7c55a6b68fff568439ab73b2cce27ace0e169057b5851df", size = 310078, upload-time = "2025-10-17T11:28:36.186Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/b81d010b0031ffa96dfb590628562ac5f513ce56aa2ab451d29fb3fedeb9/jiter-0.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:499beb9b2d7e51d61095a8de39ebcab1d1778f2a74085f8305a969f6cee9f3e4", size = 317138, upload-time = "2025-10-17T11:28:38.294Z" }, + { url = "https://files.pythonhosted.org/packages/89/12/31ea12af9d79671cc7bd893bf0ccaf3467624c0fc7146a0cbfe7b549bcfa/jiter-0.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b87b2821795e28cc990939b68ce7a038edea680a24910bd68a79d54ff3f03c02", size = 348964, upload-time = "2025-10-17T11:28:40.103Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/95cb6dc5ff962410667a29708c7a6c0691cc3c4866a0bfa79d085b56ebd6/jiter-0.11.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83f6fa494d8bba14ab100417c80e70d32d737e805cb85be2052d771c76fcd1f8", size = 363289, upload-time = "2025-10-17T11:28:41.49Z" }, + { url = "https://files.pythonhosted.org/packages/b8/3e/37006ad5843a0bc3a3ec3a6c44710d7a154113befaf5f26d2fe190668b63/jiter-0.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fbc6aea1daa2ec6f5ed465f0c5e7b0607175062ceebbea5ca70dd5ddab58083", size = 487243, upload-time = "2025-10-17T11:28:43.209Z" }, + { url = "https://files.pythonhosted.org/packages/80/5c/d38c8c801a322a0c0de47b9618c16fd766366f087ce37c4e55ae8e3c8b03/jiter-0.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:302288e2edc43174bb2db838e94688d724f9aad26c5fb9a74f7a5fb427452a6a", size = 376139, upload-time = "2025-10-17T11:28:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/b0/cd/442ad2389a5570b0ee673f93e14bbe8cdecd3e08a9ba7756081d84065e4c/jiter-0.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85db563fe3b367bb568af5d29dea4d4066d923b8e01f3417d25ebecd958de815", size = 359279, upload-time = "2025-10-17T11:28:46.152Z" }, + { url = "https://files.pythonhosted.org/packages/9a/35/8f5810d0e7d00bc395889085dbc1ccc36d454b56f28b2a5359dfd1bab48d/jiter-0.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1c1ba2b6b22f775444ef53bc2d5778396d3520abc7b2e1da8eb0c27cb3ffb10", size = 384911, upload-time = "2025-10-17T11:28:48.03Z" }, + { url = "https://files.pythonhosted.org/packages/3c/bd/8c069ceb0bafcf6b4aa5de0c27f02faf50468df39564a02e1a12389ad6c2/jiter-0.11.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:523be464b14f8fd0cc78da6964b87b5515a056427a2579f9085ce30197a1b54a", size = 517879, upload-time = "2025-10-17T11:28:49.902Z" }, + { url = "https://files.pythonhosted.org/packages/bc/3c/9163efcf762f79f47433078b4f0a1bddc56096082c02c6cae2f47f07f56f/jiter-0.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25b99b3f04cd2a38fefb22e822e35eb203a2cd37d680dbbc0c0ba966918af336", size = 508739, upload-time = "2025-10-17T11:28:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/44/07/50690f257935845d3114b95b5dd03749eeaab5e395cbb522f9e957da4551/jiter-0.11.1-cp310-cp310-win32.whl", hash = "sha256:47a79e90545a596bb9104109777894033347b11180d4751a216afef14072dbe7", size = 203948, upload-time = 
"2025-10-17T11:28:54.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3a/5964a944bf2e98ffd566153fdc2a6a368fcb11b58cc46832ca8c75808dba/jiter-0.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:cace75621ae9bd66878bf69fbd4dfc1a28ef8661e0c2d0eb72d3d6f1268eddf5", size = 207522, upload-time = "2025-10-17T11:28:56.79Z" }, + { url = "https://files.pythonhosted.org/packages/8b/34/c9e6cfe876f9a24f43ed53fe29f052ce02bd8d5f5a387dbf46ad3764bef0/jiter-0.11.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b0088ff3c374ce8ce0168523ec8e97122ebb788f950cf7bb8e39c7dc6a876a2", size = 310160, upload-time = "2025-10-17T11:28:59.174Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/b06ec8181d7165858faf2ac5287c54fe52b2287760b7fe1ba9c06890255f/jiter-0.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74433962dd3c3090655e02e461267095d6c84f0741c7827de11022ef8d7ff661", size = 316573, upload-time = "2025-10-17T11:29:00.905Z" }, + { url = "https://files.pythonhosted.org/packages/66/49/3179d93090f2ed0c6b091a9c210f266d2d020d82c96f753260af536371d0/jiter-0.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d98030e345e6546df2cc2c08309c502466c66c4747b043f1a0d415fada862b8", size = 348998, upload-time = "2025-10-17T11:29:02.321Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9d/63db2c8eabda7a9cad65a2e808ca34aaa8689d98d498f5a2357d7a2e2cec/jiter-0.11.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d6db0b2e788db46bec2cf729a88b6dd36959af2abd9fa2312dfba5acdd96dcb", size = 363413, upload-time = "2025-10-17T11:29:03.787Z" }, + { url = "https://files.pythonhosted.org/packages/25/ff/3e6b3170c5053053c7baddb8d44e2bf11ff44cd71024a280a8438ae6ba32/jiter-0.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55678fbbda261eafe7289165dd2ddd0e922df5f9a1ae46d7c79a5a15242bd7d1", size = 487144, upload-time = "2025-10-17T11:29:05.37Z" }, + { url = "https://files.pythonhosted.org/packages/b0/50/b63fcadf699893269b997f4c2e88400bc68f085c6db698c6e5e69d63b2c1/jiter-0.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a6b74fae8e40497653b52ce6ca0f1b13457af769af6fb9c1113efc8b5b4d9be", size = 376215, upload-time = "2025-10-17T11:29:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/39/8c/57a8a89401134167e87e73471b9cca321cf651c1fd78c45f3a0f16932213/jiter-0.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a55a453f8b035eb4f7852a79a065d616b7971a17f5e37a9296b4b38d3b619e4", size = 359163, upload-time = "2025-10-17T11:29:09.047Z" }, + { url = "https://files.pythonhosted.org/packages/4b/96/30b0cdbffbb6f753e25339d3dbbe26890c9ef119928314578201c758aace/jiter-0.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2638148099022e6bdb3f42904289cd2e403609356fb06eb36ddec2d50958bc29", size = 385344, upload-time = "2025-10-17T11:29:10.69Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d5/31dae27c1cc9410ad52bb514f11bfa4f286f7d6ef9d287b98b8831e156ec/jiter-0.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:252490567a5d990986f83b95a5f1ca1bf205ebd27b3e9e93bb7c2592380e29b9", size = 517972, upload-time = "2025-10-17T11:29:12.174Z" }, + { url = "https://files.pythonhosted.org/packages/61/1e/5905a7a3aceab80de13ab226fd690471a5e1ee7e554dc1015e55f1a6b896/jiter-0.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d431d52b0ca2436eea6195f0f48528202100c7deda354cb7aac0a302167594d5", size = 508408, upload-time = 
"2025-10-17T11:29:13.597Z" }, + { url = "https://files.pythonhosted.org/packages/91/12/1c49b97aa49077e136e8591cef7162f0d3e2860ae457a2d35868fd1521ef/jiter-0.11.1-cp311-cp311-win32.whl", hash = "sha256:db6f41e40f8bae20c86cb574b48c4fd9f28ee1c71cb044e9ec12e78ab757ba3a", size = 203937, upload-time = "2025-10-17T11:29:14.894Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9d/2255f7c17134ee9892c7e013c32d5bcf4bce64eb115402c9fe5e727a67eb/jiter-0.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0cc407b8e6cdff01b06bb80f61225c8b090c3df108ebade5e0c3c10993735b19", size = 207589, upload-time = "2025-10-17T11:29:16.166Z" }, + { url = "https://files.pythonhosted.org/packages/3c/28/6307fc8f95afef84cae6caf5429fee58ef16a582c2ff4db317ceb3e352fa/jiter-0.11.1-cp311-cp311-win_arm64.whl", hash = "sha256:fe04ea475392a91896d1936367854d346724a1045a247e5d1c196410473b8869", size = 188391, upload-time = "2025-10-17T11:29:17.488Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/318e8af2c904a9d29af91f78c1e18f0592e189bbdb8a462902d31fe20682/jiter-0.11.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c92148eec91052538ce6823dfca9525f5cfc8b622d7f07e9891a280f61b8c96c", size = 305655, upload-time = "2025-10-17T11:29:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/f7/29/6c7de6b5d6e511d9e736312c0c9bfcee8f9b6bef68182a08b1d78767e627/jiter-0.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ecd4da91b5415f183a6be8f7158d127bdd9e6a3174138293c0d48d6ea2f2009d", size = 315645, upload-time = "2025-10-17T11:29:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5f/ef9e5675511ee0eb7f98dd8c90509e1f7743dbb7c350071acae87b0145f3/jiter-0.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7e3ac25c00b9275684d47aa42febaa90a9958e19fd1726c4ecf755fbe5e553b", size = 348003, upload-time = "2025-10-17T11:29:22.712Z" }, + { url = "https://files.pythonhosted.org/packages/56/1b/abe8c4021010b0a320d3c62682769b700fb66f92c6db02d1a1381b3db025/jiter-0.11.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d7305c0a841858f866cd459cd9303f73883fb5e097257f3d4a3920722c69d4", size = 365122, upload-time = "2025-10-17T11:29:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/2a/2d/4a18013939a4f24432f805fbd5a19893e64650b933edb057cd405275a538/jiter-0.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e86fa10e117dce22c547f31dd6d2a9a222707d54853d8de4e9a2279d2c97f239", size = 488360, upload-time = "2025-10-17T11:29:25.724Z" }, + { url = "https://files.pythonhosted.org/packages/f0/77/38124f5d02ac4131f0dfbcfd1a19a0fac305fa2c005bc4f9f0736914a1a4/jiter-0.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae5ef1d48aec7e01ee8420155d901bb1d192998fa811a65ebb82c043ee186711", size = 376884, upload-time = "2025-10-17T11:29:27.056Z" }, + { url = "https://files.pythonhosted.org/packages/7b/43/59fdc2f6267959b71dd23ce0bd8d4aeaf55566aa435a5d00f53d53c7eb24/jiter-0.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb68e7bf65c990531ad8715e57d50195daf7c8e6f1509e617b4e692af1108939", size = 358827, upload-time = "2025-10-17T11:29:28.698Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/b3cc20ff5340775ea3bbaa0d665518eddecd4266ba7244c9cb480c0c82ec/jiter-0.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43b30c8154ded5845fa454ef954ee67bfccce629b2dea7d01f795b42bc2bda54", size = 385171, upload-time = "2025-10-17T11:29:30.078Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/bc/94dd1f3a61f4dc236f787a097360ec061ceeebebf4ea120b924d91391b10/jiter-0.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:586cafbd9dd1f3ce6a22b4a085eaa6be578e47ba9b18e198d4333e598a91db2d", size = 518359, upload-time = "2025-10-17T11:29:31.464Z" }, + { url = "https://files.pythonhosted.org/packages/7e/8c/12ee132bd67e25c75f542c227f5762491b9a316b0dad8e929c95076f773c/jiter-0.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:677cc2517d437a83bb30019fd4cf7cad74b465914c56ecac3440d597ac135250", size = 509205, upload-time = "2025-10-17T11:29:32.895Z" }, + { url = "https://files.pythonhosted.org/packages/39/d5/9de848928ce341d463c7e7273fce90ea6d0ea4343cd761f451860fa16b59/jiter-0.11.1-cp312-cp312-win32.whl", hash = "sha256:fa992af648fcee2b850a3286a35f62bbbaeddbb6dbda19a00d8fbc846a947b6e", size = 205448, upload-time = "2025-10-17T11:29:34.217Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/8002d78637e05009f5e3fb5288f9d57d65715c33b5d6aa20fd57670feef5/jiter-0.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:88b5cae9fa51efeb3d4bd4e52bfd4c85ccc9cac44282e2a9640893a042ba4d87", size = 204285, upload-time = "2025-10-17T11:29:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a2/bb24d5587e4dff17ff796716542f663deee337358006a80c8af43ddc11e5/jiter-0.11.1-cp312-cp312-win_arm64.whl", hash = "sha256:9a6cae1ab335551917f882f2c3c1efe7617b71b4c02381e4382a8fc80a02588c", size = 188712, upload-time = "2025-10-17T11:29:37.027Z" }, + { url = "https://files.pythonhosted.org/packages/7c/4b/e4dd3c76424fad02a601d570f4f2a8438daea47ba081201a721a903d3f4c/jiter-0.11.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:71b6a920a5550f057d49d0e8bcc60945a8da998019e83f01adf110e226267663", size = 305272, upload-time = "2025-10-17T11:29:39.249Z" }, + { url = "https://files.pythonhosted.org/packages/67/83/2cd3ad5364191130f4de80eacc907f693723beaab11a46c7d155b07a092c/jiter-0.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b3de72e925388453a5171be83379549300db01284f04d2a6f244d1d8de36f94", size = 314038, upload-time = "2025-10-17T11:29:40.563Z" }, + { url = "https://files.pythonhosted.org/packages/d3/3c/8e67d9ba524e97d2f04c8f406f8769a23205026b13b0938d16646d6e2d3e/jiter-0.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc19dd65a2bd3d9c044c5b4ebf657ca1e6003a97c0fc10f555aa4f7fb9821c00", size = 345977, upload-time = "2025-10-17T11:29:42.009Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a5/489ce64d992c29bccbffabb13961bbb0435e890d7f2d266d1f3df5e917d2/jiter-0.11.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d58faaa936743cd1464540562f60b7ce4fd927e695e8bc31b3da5b914baa9abd", size = 364503, upload-time = "2025-10-17T11:29:43.459Z" }, + { url = "https://files.pythonhosted.org/packages/d4/c0/e321dd83ee231d05c8fe4b1a12caf1f0e8c7a949bf4724d58397104f10f2/jiter-0.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:902640c3103625317291cb73773413b4d71847cdf9383ba65528745ff89f1d14", size = 487092, upload-time = "2025-10-17T11:29:44.835Z" }, + { url = "https://files.pythonhosted.org/packages/f9/5e/8f24ec49c8d37bd37f34ec0112e0b1a3b4b5a7b456c8efff1df5e189ad43/jiter-0.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30405f726e4c2ed487b176c09f8b877a957f535d60c1bf194abb8dadedb5836f", size = 376328, upload-time = "2025-10-17T11:29:46.175Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/70/ded107620e809327cf7050727e17ccfa79d6385a771b7fe38fb31318ef00/jiter-0.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3217f61728b0baadd2551844870f65219ac4a1285d5e1a4abddff3d51fdabe96", size = 356632, upload-time = "2025-10-17T11:29:47.454Z" }, + { url = "https://files.pythonhosted.org/packages/19/53/c26f7251613f6a9079275ee43c89b8a973a95ff27532c421abc2a87afb04/jiter-0.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1364cc90c03a8196f35f396f84029f12abe925415049204446db86598c8b72c", size = 384358, upload-time = "2025-10-17T11:29:49.377Z" }, + { url = "https://files.pythonhosted.org/packages/84/16/e0f2cc61e9c4d0b62f6c1bd9b9781d878a427656f88293e2a5335fa8ff07/jiter-0.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:53a54bf8e873820ab186b2dca9f6c3303f00d65ae5e7b7d6bda1b95aa472d646", size = 517279, upload-time = "2025-10-17T11:29:50.968Z" }, + { url = "https://files.pythonhosted.org/packages/60/5c/4cd095eaee68961bca3081acbe7c89e12ae24a5dae5fd5d2a13e01ed2542/jiter-0.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7e29aca023627b0e0c2392d4248f6414d566ff3974fa08ff2ac8dbb96dfee92a", size = 508276, upload-time = "2025-10-17T11:29:52.619Z" }, + { url = "https://files.pythonhosted.org/packages/4f/25/f459240e69b0e09a7706d96ce203ad615ca36b0fe832308d2b7123abf2d0/jiter-0.11.1-cp313-cp313-win32.whl", hash = "sha256:f153e31d8bca11363751e875c0a70b3d25160ecbaee7b51e457f14498fb39d8b", size = 205593, upload-time = "2025-10-17T11:29:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/461bafe22bae79bab74e217a09c907481a46d520c36b7b9fe71ee8c9e983/jiter-0.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:f773f84080b667c69c4ea0403fc67bb08b07e2b7ce1ef335dea5868451e60fed", size = 203518, upload-time = "2025-10-17T11:29:55.216Z" }, + { url = "https://files.pythonhosted.org/packages/7b/72/c45de6e320edb4fa165b7b1a414193b3cae302dd82da2169d315dcc78b44/jiter-0.11.1-cp313-cp313-win_arm64.whl", hash = "sha256:635ecd45c04e4c340d2187bcb1cea204c7cc9d32c1364d251564bf42e0e39c2d", size = 188062, upload-time = "2025-10-17T11:29:56.631Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/4a57922437ca8753ef823f434c2dec5028b237d84fa320f06a3ba1aec6e8/jiter-0.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d892b184da4d94d94ddb4031296931c74ec8b325513a541ebfd6dfb9ae89904b", size = 313814, upload-time = "2025-10-17T11:29:58.509Z" }, + { url = "https://files.pythonhosted.org/packages/76/50/62a0683dadca25490a4bedc6a88d59de9af2a3406dd5a576009a73a1d392/jiter-0.11.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa22c223a3041dacb2fcd37c70dfd648b44662b4a48e242592f95bda5ab09d58", size = 344987, upload-time = "2025-10-17T11:30:00.208Z" }, + { url = "https://files.pythonhosted.org/packages/da/00/2355dbfcbf6cdeaddfdca18287f0f38ae49446bb6378e4a5971e9356fc8a/jiter-0.11.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:330e8e6a11ad4980cd66a0f4a3e0e2e0f646c911ce047014f984841924729789", size = 356399, upload-time = "2025-10-17T11:30:02.084Z" }, + { url = "https://files.pythonhosted.org/packages/c9/07/c2bd748d578fa933d894a55bff33f983bc27f75fc4e491b354bef7b78012/jiter-0.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:09e2e386ebf298547ca3a3704b729471f7ec666c2906c5c26c1a915ea24741ec", size = 203289, upload-time = "2025-10-17T11:30:03.656Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/ee/ace64a853a1acbd318eb0ca167bad1cf5ee037207504b83a868a5849747b/jiter-0.11.1-cp313-cp313t-win_arm64.whl", hash = "sha256:fe4a431c291157e11cee7c34627990ea75e8d153894365a3bc84b7a959d23ca8", size = 188284, upload-time = "2025-10-17T11:30:05.046Z" }, + { url = "https://files.pythonhosted.org/packages/8d/00/d6006d069e7b076e4c66af90656b63da9481954f290d5eca8c715f4bf125/jiter-0.11.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0fa1f70da7a8a9713ff8e5f75ec3f90c0c870be6d526aa95e7c906f6a1c8c676", size = 304624, upload-time = "2025-10-17T11:30:06.678Z" }, + { url = "https://files.pythonhosted.org/packages/fc/45/4a0e31eb996b9ccfddbae4d3017b46f358a599ccf2e19fbffa5e531bd304/jiter-0.11.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:569ee559e5046a42feb6828c55307cf20fe43308e3ae0d8e9e4f8d8634d99944", size = 315042, upload-time = "2025-10-17T11:30:08.87Z" }, + { url = "https://files.pythonhosted.org/packages/e7/91/22f5746f5159a28c76acdc0778801f3c1181799aab196dbea2d29e064968/jiter-0.11.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f69955fa1d92e81987f092b233f0be49d4c937da107b7f7dcf56306f1d3fcce9", size = 346357, upload-time = "2025-10-17T11:30:10.222Z" }, + { url = "https://files.pythonhosted.org/packages/f5/4f/57620857d4e1dc75c8ff4856c90cb6c135e61bff9b4ebfb5dc86814e82d7/jiter-0.11.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:090f4c9d4a825e0fcbd0a2647c9a88a0f366b75654d982d95a9590745ff0c48d", size = 365057, upload-time = "2025-10-17T11:30:11.585Z" }, + { url = "https://files.pythonhosted.org/packages/ce/34/caf7f9cc8ae0a5bb25a5440cc76c7452d264d1b36701b90fdadd28fe08ec/jiter-0.11.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf3d8cedf9e9d825233e0dcac28ff15c47b7c5512fdfe2e25fd5bbb6e6b0cee", size = 487086, upload-time = "2025-10-17T11:30:13.052Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/85b5857c329d533d433fedf98804ebec696004a1f88cabad202b2ddc55cf/jiter-0.11.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2aa9b1958f9c30d3d1a558b75f0626733c60eb9b7774a86b34d88060be1e67fe", size = 376083, upload-time = "2025-10-17T11:30:14.416Z" }, + { url = "https://files.pythonhosted.org/packages/85/d3/2d9f973f828226e6faebdef034097a2918077ea776fb4d88489949024787/jiter-0.11.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42d1ca16590b768c5e7d723055acd2633908baacb3628dd430842e2e035aa90", size = 357825, upload-time = "2025-10-17T11:30:15.765Z" }, + { url = "https://files.pythonhosted.org/packages/f4/55/848d4dabf2c2c236a05468c315c2cb9dc736c5915e65449ccecdba22fb6f/jiter-0.11.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5db4c2486a023820b701a17aec9c5a6173c5ba4393f26662f032f2de9c848b0f", size = 383933, upload-time = "2025-10-17T11:30:17.34Z" }, + { url = "https://files.pythonhosted.org/packages/0b/6c/204c95a4fbb0e26dfa7776c8ef4a878d0c0b215868011cc904bf44f707e2/jiter-0.11.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:4573b78777ccfac954859a6eff45cbd9d281d80c8af049d0f1a3d9fc323d5c3a", size = 517118, upload-time = "2025-10-17T11:30:18.684Z" }, + { url = "https://files.pythonhosted.org/packages/88/25/09956644ea5a2b1e7a2a0f665cb69a973b28f4621fa61fc0c0f06ff40a31/jiter-0.11.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7593ac6f40831d7961cb67633c39b9fef6689a211d7919e958f45710504f52d3", size = 508194, upload-time = "2025-10-17T11:30:20.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/49/4d1657355d7f5c9e783083a03a3f07d5858efa6916a7d9634d07db1c23bd/jiter-0.11.1-cp314-cp314-win32.whl", hash = "sha256:87202ec6ff9626ff5f9351507def98fcf0df60e9a146308e8ab221432228f4ea", size = 203961, upload-time = "2025-10-17T11:30:22.073Z" }, + { url = "https://files.pythonhosted.org/packages/76/bd/f063bd5cc2712e7ca3cf6beda50894418fc0cfeb3f6ff45a12d87af25996/jiter-0.11.1-cp314-cp314-win_amd64.whl", hash = "sha256:a5dd268f6531a182c89d0dd9a3f8848e86e92dfff4201b77a18e6b98aa59798c", size = 202804, upload-time = "2025-10-17T11:30:23.452Z" }, + { url = "https://files.pythonhosted.org/packages/52/ca/4d84193dfafef1020bf0bedd5e1a8d0e89cb67c54b8519040effc694964b/jiter-0.11.1-cp314-cp314-win_arm64.whl", hash = "sha256:5d761f863f912a44748a21b5c4979c04252588ded8d1d2760976d2e42cd8d991", size = 188001, upload-time = "2025-10-17T11:30:24.915Z" }, + { url = "https://files.pythonhosted.org/packages/d5/fa/3b05e5c9d32efc770a8510eeb0b071c42ae93a5b576fd91cee9af91689a1/jiter-0.11.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2cc5a3965285ddc33e0cab933e96b640bc9ba5940cea27ebbbf6695e72d6511c", size = 312561, upload-time = "2025-10-17T11:30:26.742Z" }, + { url = "https://files.pythonhosted.org/packages/50/d3/335822eb216154ddb79a130cbdce88fdf5c3e2b43dc5dba1fd95c485aaf5/jiter-0.11.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b572b3636a784c2768b2342f36a23078c8d3aa6d8a30745398b1bab58a6f1a8", size = 344551, upload-time = "2025-10-17T11:30:28.252Z" }, + { url = "https://files.pythonhosted.org/packages/31/6d/a0bed13676b1398f9b3ba61f32569f20a3ff270291161100956a577b2dd3/jiter-0.11.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad93e3d67a981f96596d65d2298fe8d1aa649deb5374a2fb6a434410ee11915e", size = 363051, upload-time = "2025-10-17T11:30:30.009Z" }, + { url = "https://files.pythonhosted.org/packages/a4/03/313eda04aa08545a5a04ed5876e52f49ab76a4d98e54578896ca3e16313e/jiter-0.11.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a83097ce379e202dcc3fe3fc71a16d523d1ee9192c8e4e854158f96b3efe3f2f", size = 485897, upload-time = "2025-10-17T11:30:31.429Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/a1011b9d325e40b53b1b96a17c010b8646013417f3902f97a86325b19299/jiter-0.11.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7042c51e7fbeca65631eb0c332f90c0c082eab04334e7ccc28a8588e8e2804d9", size = 375224, upload-time = "2025-10-17T11:30:33.18Z" }, + { url = "https://files.pythonhosted.org/packages/92/da/1b45026b19dd39b419e917165ff0ea629dbb95f374a3a13d2df95e40a6ac/jiter-0.11.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a68d679c0e47649a61df591660507608adc2652442de7ec8276538ac46abe08", size = 356606, upload-time = "2025-10-17T11:30:34.572Z" }, + { url = "https://files.pythonhosted.org/packages/7a/0c/9acb0e54d6a8ba59ce923a180ebe824b4e00e80e56cefde86cc8e0a948be/jiter-0.11.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1b0da75dbf4b6ec0b3c9e604d1ee8beaf15bc046fff7180f7d89e3cdbd3bb51", size = 384003, upload-time = "2025-10-17T11:30:35.987Z" }, + { url = "https://files.pythonhosted.org/packages/3f/2b/e5a5fe09d6da2145e4eed651e2ce37f3c0cf8016e48b1d302e21fb1628b7/jiter-0.11.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:69dd514bf0fa31c62147d6002e5ca2b3e7ef5894f5ac6f0a19752385f4e89437", size = 516946, upload-time = "2025-10-17T11:30:37.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/fe/db936e16e0228d48eb81f9934e8327e9fde5185e84f02174fcd22a01be87/jiter-0.11.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:bb31ac0b339efa24c0ca606febd8b77ef11c58d09af1b5f2be4c99e907b11111", size = 507614, upload-time = "2025-10-17T11:30:38.977Z" }, + { url = "https://files.pythonhosted.org/packages/86/db/c4438e8febfb303486d13c6b72f5eb71cf851e300a0c1f0b4140018dd31f/jiter-0.11.1-cp314-cp314t-win32.whl", hash = "sha256:b2ce0d6156a1d3ad41da3eec63b17e03e296b78b0e0da660876fccfada86d2f7", size = 204043, upload-time = "2025-10-17T11:30:40.308Z" }, + { url = "https://files.pythonhosted.org/packages/36/59/81badb169212f30f47f817dfaabf965bc9b8204fed906fab58104ee541f9/jiter-0.11.1-cp314-cp314t-win_amd64.whl", hash = "sha256:f4db07d127b54c4a2d43b4cf05ff0193e4f73e0dd90c74037e16df0b29f666e1", size = 204046, upload-time = "2025-10-17T11:30:41.692Z" }, + { url = "https://files.pythonhosted.org/packages/dd/01/43f7b4eb61db3e565574c4c5714685d042fb652f9eef7e5a3de6aafa943a/jiter-0.11.1-cp314-cp314t-win_arm64.whl", hash = "sha256:28e4fdf2d7ebfc935523e50d1efa3970043cfaa161674fe66f9642409d001dfe", size = 188069, upload-time = "2025-10-17T11:30:43.23Z" }, + { url = "https://files.pythonhosted.org/packages/9d/51/bd41562dd284e2a18b6dc0a99d195fd4a3560d52ab192c42e56fe0316643/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:e642b5270e61dd02265866398707f90e365b5db2eb65a4f30c789d826682e1f6", size = 306871, upload-time = "2025-10-17T11:31:03.616Z" }, + { url = "https://files.pythonhosted.org/packages/ba/cb/64e7f21dd357e8cd6b3c919c26fac7fc198385bbd1d85bb3b5355600d787/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:464ba6d000585e4e2fd1e891f31f1231f497273414f5019e27c00a4b8f7a24ad", size = 301454, upload-time = "2025-10-17T11:31:05.338Z" }, + { url = "https://files.pythonhosted.org/packages/55/b0/54bdc00da4ef39801b1419a01035bd8857983de984fd3776b0be6b94add7/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:055568693ab35e0bf3a171b03bb40b2dcb10352359e0ab9b5ed0da2bf1eb6f6f", size = 336801, upload-time = "2025-10-17T11:31:06.893Z" }, + { url = "https://files.pythonhosted.org/packages/de/8f/87176ed071d42e9db415ed8be787ef4ef31a4fa27f52e6a4fbf34387bd28/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c69ea798d08a915ba4478113efa9e694971e410056392f4526d796f136d3fa", size = 343452, upload-time = "2025-10-17T11:31:08.259Z" }, + { url = "https://files.pythonhosted.org/packages/a6/bc/950dd7f170c6394b6fdd73f989d9e729bd98907bcc4430ef080a72d06b77/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:0d4d6993edc83cf75e8c6828a8d6ce40a09ee87e38c7bfba6924f39e1337e21d", size = 302626, upload-time = "2025-10-17T11:31:09.645Z" }, + { url = "https://files.pythonhosted.org/packages/3a/65/43d7971ca82ee100b7b9b520573eeef7eabc0a45d490168ebb9a9b5bb8b2/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f78d151c83a87a6cf5461d5ee55bc730dd9ae227377ac6f115b922989b95f838", size = 297034, upload-time = "2025-10-17T11:31:10.975Z" }, + { url = "https://files.pythonhosted.org/packages/19/4c/000e1e0c0c67e96557a279f8969487ea2732d6c7311698819f977abae837/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9022974781155cd5521d5cb10997a03ee5e31e8454c9d999dcdccd253f2353f", size = 337328, upload-time 
= "2025-10-17T11:31:12.399Z" }, + { url = "https://files.pythonhosted.org/packages/d9/71/71408b02c6133153336d29fa3ba53000f1e1a3f78bb2fc2d1a1865d2e743/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18c77aaa9117510d5bdc6a946baf21b1f0cfa58ef04d31c8d016f206f2118960", size = 343697, upload-time = "2025-10-17T11:31:13.773Z" }, ] [[package]] @@ -493,9 +542,71 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, ] +[[package]] +name = "langchain" +version = "1.0.3" +source = { editable = "../../langchain_v1" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph" }, + { name = "pydantic" }, +] + +[package.metadata] +requires-dist = [ + { name = "langchain-anthropic", marker = "extra == 'anthropic'" }, + { name = "langchain-aws", marker = "extra == 'aws'" }, + { name = "langchain-community", marker = "extra == 'community'" }, + { name = "langchain-core", editable = "../../core" }, + { name = "langchain-deepseek", marker = "extra == 'deepseek'" }, + { name = "langchain-fireworks", marker = "extra == 'fireworks'" }, + { name = "langchain-google-genai", marker = "extra == 'google-genai'" }, + { name = "langchain-google-vertexai", marker = "extra == 'google-vertexai'" }, + { name = "langchain-groq", marker = "extra == 'groq'" }, + { name = "langchain-huggingface", marker = "extra == 'huggingface'" }, + { name = "langchain-mistralai", marker = "extra == 'mistralai'" }, + { name = "langchain-ollama", marker = "extra == 'ollama'" }, + { name = "langchain-openai", marker = "extra == 'openai'", editable = "." }, + { name = "langchain-perplexity", marker = "extra == 'perplexity'" }, + { name = "langchain-together", marker = "extra == 'together'" }, + { name = "langchain-xai", marker = "extra == 'xai'" }, + { name = "langgraph", specifier = ">=1.0.2,<1.1.0" }, + { name = "pydantic", specifier = ">=2.7.4,<3.0.0" }, +] +provides-extras = ["community", "anthropic", "openai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"] + +[package.metadata.requires-dev] +lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13.0" }] +test = [ + { name = "langchain-openai", editable = "." 
}, + { name = "langchain-tests", editable = "../../standard-tests" }, + { name = "pytest", specifier = ">=8.0.0,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.2,<2.0.0" }, + { name = "pytest-cov", specifier = ">=4.0.0,<8.0.0" }, + { name = "pytest-mock" }, + { name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" }, + { name = "pytest-watcher", specifier = ">=0.2.6,<1.0.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" }, + { name = "syrupy", specifier = ">=4.0.2,<5.0.0" }, + { name = "toml", specifier = ">=0.10.2,<1.0.0" }, +] +test-integration = [ + { name = "cassio", specifier = ">=0.1.0,<1.0.0" }, + { name = "langchain-core", editable = "../../core" }, + { name = "langchain-text-splitters", editable = "../../text-splitters" }, + { name = "langchainhub", specifier = ">=0.1.16,<1.0.0" }, + { name = "python-dotenv", specifier = ">=1.0.0,<2.0.0" }, + { name = "vcrpy", specifier = ">=7.0.0,<8.0.0" }, + { name = "wrapt", specifier = ">=1.15.0,<2.0.0" }, +] +typing = [ + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, + { name = "types-toml", specifier = ">=0.10.8.20240310,<1.0.0.0" }, +] + [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.2" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -553,7 +664,7 @@ typing = [ [[package]] name = "langchain-openai" -version = "1.0.0a4" +version = "1.0.2" source = { editable = "." } dependencies = [ { name = "langchain-core" }, @@ -570,10 +681,11 @@ lint = [ ] test = [ { name = "freezegun" }, + { name = "langchain" }, { name = "langchain-core" }, { name = "langchain-tests" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, @@ -588,7 +700,7 @@ test = [ test-integration = [ { name = "httpx" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pillow" }, ] typing = [ @@ -609,6 +721,7 @@ dev = [{ name = "langchain-core", editable = "../../core" }] lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }] test = [ { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, + { name = "langchain", editable = "../../langchain_v1" }, { name = "langchain-core", editable = "../../core" }, { name = "langchain-tests", editable = "../../standard-tests" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, @@ -628,7 +741,7 @@ test-integration = [ { name = "httpx", specifier = ">=0.27.0,<1.0.0" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, - { name = "pillow", specifier = ">=10.3.0,<11.0.0" }, + { name = "pillow", specifier = ">=10.3.0,<12.0.0" }, ] typing = [ { name = "langchain-core", editable = "../../core" }, @@ -638,13 +751,13 @@ typing = [ [[package]] name = 
"langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, { name = "langchain-core" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-benchmark" }, @@ -681,9 +794,65 @@ typing = [ { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, ] +[[package]] +name = "langgraph" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, + { name = "langgraph-prebuilt" }, + { name = "langgraph-sdk" }, + { name = "pydantic" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/25/18e6e056ee1a8af64fcab441b4a3f2e158399935b08f148c7718fc42ecdb/langgraph-1.0.2.tar.gz", hash = "sha256:dae1af08d6025cb1fcaed68f502c01af7d634d9044787c853a46c791cfc52f67", size = 482660, upload-time = "2025-10-29T18:38:28.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/b1/9f4912e13d4ed691f2685c8a4b764b5a9237a30cca0c5782bc213d9f0a9a/langgraph-1.0.2-py3-none-any.whl", hash = "sha256:b3d56b8c01de857b5fb1da107e8eab6e30512a377685eeedb4f76456724c9729", size = 156751, upload-time = "2025-10-29T18:38:26.577Z" }, +] + +[[package]] +name = "langgraph-checkpoint" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "ormsgpack" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/83/6404f6ed23a91d7bc63d7df902d144548434237d017820ceaa8d014035f2/langgraph_checkpoint-2.1.2.tar.gz", hash = "sha256:112e9d067a6eff8937caf198421b1ffba8d9207193f14ac6f89930c1260c06f9", size = 142420, upload-time = "2025-10-07T17:45:17.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/f2/06bf5addf8ee664291e1b9ffa1f28fc9d97e59806dc7de5aea9844cbf335/langgraph_checkpoint-2.1.2-py3-none-any.whl", hash = "sha256:911ebffb069fd01775d4b5184c04aaafc2962fcdf50cf49d524cd4367c4d0c60", size = 45763, upload-time = "2025-10-07T17:45:16.19Z" }, +] + +[[package]] +name = "langgraph-prebuilt" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/2f/b940590436e07b3450fe6d791aad5e581363ad536c4f1771e3ba46530268/langgraph_prebuilt-1.0.2.tar.gz", hash = "sha256:9896dbabf04f086eb59df4294f54ab5bdb21cd78e27e0a10e695dffd1cc6097d", size = 142075, upload-time = "2025-10-29T18:29:00.401Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/2f/9a7d00d4afa036e65294059c7c912002fb72ba5dbbd5c2a871ca06360278/langgraph_prebuilt-1.0.2-py3-none-any.whl", hash = "sha256:d9499f7c449fb637ee7b87e3f6a3b74095f4202053c74d33894bd839ea4c57c7", size = 34286, upload-time = "2025-10-29T18:28:59.26Z" }, +] + +[[package]] +name = "langgraph-sdk" +version = "0.2.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/23/d8/40e01190a73c564a4744e29a6c902f78d34d43dad9b652a363a92a67059c/langgraph_sdk-0.2.9.tar.gz", hash = "sha256:b3bd04c6be4fa382996cd2be8fbc1e7cc94857d2bc6b6f4599a7f2a245975303", size = 99802, upload-time = "2025-09-20T18:49:14.734Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/05/b2d34e16638241e6f27a6946d28160d4b8b641383787646d41a3727e0896/langgraph_sdk-0.2.9-py3-none-any.whl", hash = "sha256:fbf302edadbf0fb343596f91c597794e936ef68eebc0d3e1d358b6f9f72a1429", size = 56752, upload-time = "2025-09-20T18:49:13.346Z" }, +] + [[package]] name = "langsmith" -version = "0.4.31" +version = "0.4.37" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -694,9 +863,9 @@ dependencies = [ { name = "requests-toolbelt" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/f5/edbdf89a162ee025348b3b2080fb3b88f4a1040a5a186f32d34aca913994/langsmith-0.4.31.tar.gz", hash = "sha256:5fb3729e22bd9a225391936cb9d1080322e6c375bb776514af06b56d6c46ed3e", size = 959698, upload-time = "2025-09-25T04:18:19.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/51/58d561dd40ec564509724f0a6a7148aa8090143208ef5d06b73b7fc90d31/langsmith-0.4.37.tar.gz", hash = "sha256:d9a0eb6dd93f89843ac982c9f92be93cf2bcabbe19957f362c547766c7366c71", size = 959089, upload-time = "2025-10-15T22:33:59.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/8e/e7a43d907a147e1f87eebdd6737483f9feba52a5d4b20f69d0bd6f2fa22f/langsmith-0.4.31-py3-none-any.whl", hash = "sha256:64f340bdead21defe5f4a6ca330c11073e35444989169f669508edf45a19025f", size = 386347, upload-time = "2025-09-25T04:18:16.69Z" }, + { url = "https://files.pythonhosted.org/packages/14/e8/edff4de49cf364eb9ee88d13da0a555844df32438413bf53d90d507b97cd/langsmith-0.4.37-py3-none-any.whl", hash = "sha256:e34a94ce7277646299e4703a0f6e2d2c43647a28e8b800bb7ef82fd87a0ec766", size = 396111, upload-time = "2025-10-15T22:33:57.392Z" }, ] [[package]] @@ -722,104 +891,140 @@ wheels = [ [[package]] name = "multidict" -version = "6.6.4" +version = "6.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/6b/86f353088c1358e76fd30b0146947fddecee812703b604ee901e85cd2a80/multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f", size = 77054, upload-time = "2025-08-11T12:06:02.99Z" }, - { url = "https://files.pythonhosted.org/packages/19/5d/c01dc3d3788bb877bd7f5753ea6eb23c1beeca8044902a8f5bfb54430f63/multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb", size = 44914, upload-time = "2025-08-11T12:06:05.264Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/44/964dae19ea42f7d3e166474d8205f14bb811020e28bc423d46123ddda763/multidict-6.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495", size = 44601, upload-time = "2025-08-11T12:06:06.627Z" }, - { url = "https://files.pythonhosted.org/packages/31/20/0616348a1dfb36cb2ab33fc9521de1f27235a397bf3f59338e583afadd17/multidict-6.6.4-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8", size = 224821, upload-time = "2025-08-11T12:06:08.06Z" }, - { url = "https://files.pythonhosted.org/packages/14/26/5d8923c69c110ff51861af05bd27ca6783011b96725d59ccae6d9daeb627/multidict-6.6.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7", size = 242608, upload-time = "2025-08-11T12:06:09.697Z" }, - { url = "https://files.pythonhosted.org/packages/5c/cc/e2ad3ba9459aa34fa65cf1f82a5c4a820a2ce615aacfb5143b8817f76504/multidict-6.6.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796", size = 222324, upload-time = "2025-08-11T12:06:10.905Z" }, - { url = "https://files.pythonhosted.org/packages/19/db/4ed0f65701afbc2cb0c140d2d02928bb0fe38dd044af76e58ad7c54fd21f/multidict-6.6.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db", size = 253234, upload-time = "2025-08-11T12:06:12.658Z" }, - { url = "https://files.pythonhosted.org/packages/94/c1/5160c9813269e39ae14b73debb907bfaaa1beee1762da8c4fb95df4764ed/multidict-6.6.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0", size = 251613, upload-time = "2025-08-11T12:06:13.97Z" }, - { url = "https://files.pythonhosted.org/packages/05/a9/48d1bd111fc2f8fb98b2ed7f9a115c55a9355358432a19f53c0b74d8425d/multidict-6.6.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877", size = 241649, upload-time = "2025-08-11T12:06:15.204Z" }, - { url = "https://files.pythonhosted.org/packages/85/2a/f7d743df0019408768af8a70d2037546a2be7b81fbb65f040d76caafd4c5/multidict-6.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace", size = 239238, upload-time = "2025-08-11T12:06:16.467Z" }, - { url = "https://files.pythonhosted.org/packages/cb/b8/4f4bb13323c2d647323f7919201493cf48ebe7ded971717bfb0f1a79b6bf/multidict-6.6.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6", size = 233517, upload-time = "2025-08-11T12:06:18.107Z" }, - { url = "https://files.pythonhosted.org/packages/33/29/4293c26029ebfbba4f574febd2ed01b6f619cfa0d2e344217d53eef34192/multidict-6.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb", size = 243122, upload-time = "2025-08-11T12:06:19.361Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/60/a1c53628168aa22447bfde3a8730096ac28086704a0d8c590f3b63388d0c/multidict-6.6.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb", size = 248992, upload-time = "2025-08-11T12:06:20.661Z" }, - { url = "https://files.pythonhosted.org/packages/a3/3b/55443a0c372f33cae5d9ec37a6a973802884fa0ab3586659b197cf8cc5e9/multidict-6.6.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987", size = 243708, upload-time = "2025-08-11T12:06:21.891Z" }, - { url = "https://files.pythonhosted.org/packages/7c/60/a18c6900086769312560b2626b18e8cca22d9e85b1186ba77f4755b11266/multidict-6.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f", size = 237498, upload-time = "2025-08-11T12:06:23.206Z" }, - { url = "https://files.pythonhosted.org/packages/11/3d/8bdd8bcaff2951ce2affccca107a404925a2beafedd5aef0b5e4a71120a6/multidict-6.6.4-cp310-cp310-win32.whl", hash = "sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f", size = 41415, upload-time = "2025-08-11T12:06:24.77Z" }, - { url = "https://files.pythonhosted.org/packages/c0/53/cab1ad80356a4cd1b685a254b680167059b433b573e53872fab245e9fc95/multidict-6.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0", size = 46046, upload-time = "2025-08-11T12:06:25.893Z" }, - { url = "https://files.pythonhosted.org/packages/cf/9a/874212b6f5c1c2d870d0a7adc5bb4cfe9b0624fa15cdf5cf757c0f5087ae/multidict-6.6.4-cp310-cp310-win_arm64.whl", hash = "sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729", size = 43147, upload-time = "2025-08-11T12:06:27.534Z" }, - { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, - { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, - { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, - { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, - { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, - { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, - { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, - { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, - { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, - { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, - { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, - { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 
41306, upload-time = "2025-08-11T12:06:49.95Z" }, - { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, - { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, - { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, - { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, - { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, - { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, - { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, - { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, - { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, - { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, - { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, - { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, - { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, - { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, - { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" }, - { url = "https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" }, - { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" }, - { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" }, - { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" }, - { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" }, - { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" }, - { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" }, - { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" }, - { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" }, - { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" }, - { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" }, - { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708, upload-time = "2025-08-11T12:07:43.405Z" }, - { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" }, - { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" }, - { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" }, - { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" }, - { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" }, - { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" }, - { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" }, - { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" }, - { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" }, - { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" }, - { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" }, - { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" }, - { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" }, - { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" }, - { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" }, - { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775, upload-time = "2025-08-11T12:08:12.439Z" }, - { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" }, - { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/a9/63/7bdd4adc330abcca54c85728db2327130e49e52e8c3ce685cec44e0f2e9f/multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349", size = 77153, upload-time = "2025-10-06T14:48:26.409Z" }, + { url = "https://files.pythonhosted.org/packages/3f/bb/b6c35ff175ed1a3142222b78455ee31be71a8396ed3ab5280fbe3ebe4e85/multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e", size = 44993, upload-time = "2025-10-06T14:48:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/e0/1f/064c77877c5fa6df6d346e68075c0f6998547afe952d6471b4c5f6a7345d/multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3", size = 44607, upload-time = "2025-10-06T14:48:29.581Z" }, + { url = "https://files.pythonhosted.org/packages/04/7a/bf6aa92065dd47f287690000b3d7d332edfccb2277634cadf6a810463c6a/multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046", size = 241847, upload-time = "2025-10-06T14:48:32.107Z" }, + { url = "https://files.pythonhosted.org/packages/94/39/297a8de920f76eda343e4ce05f3b489f0ab3f9504f2576dfb37b7c08ca08/multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32", size = 242616, upload-time = "2025-10-06T14:48:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/39/3a/d0eee2898cfd9d654aea6cb8c4addc2f9756e9a7e09391cfe55541f917f7/multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73", size = 222333, upload-time = "2025-10-06T14:48:35.9Z" }, + { url = "https://files.pythonhosted.org/packages/05/48/3b328851193c7a4240815b71eea165b49248867bbb6153a0aee227a0bb47/multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc", size = 253239, upload-time = "2025-10-06T14:48:37.302Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ca/0706a98c8d126a89245413225ca4a3fefc8435014de309cf8b30acb68841/multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62", size = 251618, upload-time = "2025-10-06T14:48:38.963Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/9c7992f245554d8b173f6f0a048ad24b3e645d883f096857ec2c0822b8bd/multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84", size = 241655, upload-time = "2025-10-06T14:48:40.312Z" }, + { url = "https://files.pythonhosted.org/packages/31/79/26a85991ae67efd1c0b1fc2e0c275b8a6aceeb155a68861f63f87a798f16/multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0", size = 239245, upload-time = "2025-10-06T14:48:41.848Z" }, + { url = "https://files.pythonhosted.org/packages/14/1e/75fa96394478930b79d0302eaf9a6c69f34005a1a5251ac8b9c336486ec9/multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e", size = 233523, upload-time = "2025-10-06T14:48:43.749Z" }, + { url = "https://files.pythonhosted.org/packages/b2/5e/085544cb9f9c4ad2b5d97467c15f856df8d9bac410cffd5c43991a5d878b/multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4", size = 243129, upload-time = "2025-10-06T14:48:45.225Z" }, + { url = "https://files.pythonhosted.org/packages/b9/c3/e9d9e2f20c9474e7a8fcef28f863c5cbd29bb5adce6b70cebe8bdad0039d/multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648", size = 248999, upload-time = "2025-10-06T14:48:46.703Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3f/df171b6efa3239ae33b97b887e42671cd1d94d460614bfb2c30ffdab3b95/multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111", size = 243711, upload-time = "2025-10-06T14:48:48.146Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2f/9b5564888c4e14b9af64c54acf149263721a283aaf4aa0ae89b091d5d8c1/multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36", size = 237504, upload-time = "2025-10-06T14:48:49.447Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3a/0bd6ca0f7d96d790542d591c8c3354c1e1b6bfd2024d4d92dc3d87485ec7/multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85", size = 41422, upload-time = "2025-10-06T14:48:50.789Z" }, + { url = "https://files.pythonhosted.org/packages/00/35/f6a637ea2c75f0d3b7c7d41b1189189acff0d9deeb8b8f35536bb30f5e33/multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7", size = 46050, upload-time = "2025-10-06T14:48:51.938Z" }, + { url = "https://files.pythonhosted.org/packages/e7/b8/f7bf8329b39893d02d9d95cf610c75885d12fc0f402b1c894e1c8e01c916/multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = 
"sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0", size = 43153, upload-time = "2025-10-06T14:48:53.146Z" }, + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, + { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, + { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, + { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, + { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, + { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, + { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, + { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, + { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, + { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, + { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, + { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, + { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", 
hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, + { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" }, + { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, + { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, + { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, + { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, 
+ { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" }, + { url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" }, + { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" }, + { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" }, + { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" }, + { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520, upload-time = "2025-10-06T14:51:08.091Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = "2025-10-06T14:51:10.365Z" }, + { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, + { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, + { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, + { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, + { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, + { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, + { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, + { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" }, + { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" }, + { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" }, + { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" }, + { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376, upload-time = "2025-10-06T14:51:43.55Z" }, + { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = "2025-10-06T14:51:45.265Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, + { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, + { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" 
}, + { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] @@ -944,7 +1149,7 @@ wheels = [ [[package]] name = "numpy" -version = "2.3.3" +version = "2.3.4" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -952,86 +1157,86 @@ resolution-markers = [ "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/f4/098d2270d52b41f1bd7db9fc288aaa0400cb48c2a3e2af6fa365d9720947/numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a", size = 20582187, upload-time = "2025-10-15T16:18:11.77Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/45/e80d203ef6b267aa29b22714fb558930b27960a0c5ce3c19c999232bb3eb/numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d", size = 21259253, upload-time = "2025-09-09T15:56:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/52/18/cf2c648fccf339e59302e00e5f2bc87725a3ce1992f30f3f78c9044d7c43/numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569", size = 14450980, upload-time = "2025-09-09T15:56:05.926Z" }, - { url = "https://files.pythonhosted.org/packages/93/fb/9af1082bec870188c42a1c239839915b74a5099c392389ff04215dcee812/numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f", size = 5379709, upload-time = "2025-09-09T15:56:07.95Z" }, - { url = "https://files.pythonhosted.org/packages/75/0f/bfd7abca52bcbf9a4a65abc83fe18ef01ccdeb37bfb28bbd6ad613447c79/numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125", size = 6913923, upload-time = "2025-09-09T15:56:09.443Z" }, - { url = "https://files.pythonhosted.org/packages/79/55/d69adad255e87ab7afda1caf93ca997859092afeb697703e2f010f7c2e55/numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48", size = 14589591, upload-time = "2025-09-09T15:56:11.234Z" }, - { url = "https://files.pythonhosted.org/packages/10/a2/010b0e27ddeacab7839957d7a8f00e91206e0c2c47abbb5f35a2630e5387/numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6", size = 16938714, upload-time = "2025-09-09T15:56:14.637Z" }, - { url = "https://files.pythonhosted.org/packages/1c/6b/12ce8ede632c7126eb2762b9e15e18e204b81725b81f35176eac14dc5b82/numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa", size = 16370592, upload-time = "2025-09-09T15:56:17.285Z" }, - { url = "https://files.pythonhosted.org/packages/b4/35/aba8568b2593067bb6a8fe4c52babb23b4c3b9c80e1b49dff03a09925e4a/numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30", size = 18884474, upload-time = "2025-09-09T15:56:20.943Z" }, - { url = "https://files.pythonhosted.org/packages/45/fa/7f43ba10c77575e8be7b0138d107e4f44ca4a1ef322cd16980ea3e8b8222/numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57", size = 6599794, upload-time = "2025-09-09T15:56:23.258Z" }, - { url = "https://files.pythonhosted.org/packages/0a/a2/a4f78cb2241fe5664a22a10332f2be886dcdea8784c9f6a01c272da9b426/numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa", size = 13088104, upload-time = "2025-09-09T15:56:25.476Z" }, - { url = "https://files.pythonhosted.org/packages/79/64/e424e975adbd38282ebcd4891661965b78783de893b381cbc4832fb9beb2/numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7", size = 10460772, upload-time = "2025-09-09T15:56:27.679Z" }, - { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" }, - { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" }, - { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" }, - { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" }, - { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = 
"2025-09-09T15:56:43.343Z" }, - { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" }, - { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" }, - { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" }, - { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" }, - { url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" }, - { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" }, - { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" }, - { url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" }, - { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" }, - { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" }, - { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" }, - { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" }, - { url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" }, - { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" }, - { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" }, - { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" }, - { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" }, - { url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" }, - { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" }, - { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" }, - { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" }, - { url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" }, - { url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593", size = 20951527, upload-time = "2025-09-09T15:57:52.006Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652", size = 14186159, upload-time = "2025-09-09T15:57:54.407Z" }, - { url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7", size = 5114624, upload-time = "2025-09-09T15:57:56.5Z" }, - { url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a", size = 6642627, upload-time = "2025-09-09T15:57:58.206Z" }, - { url = "https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe", size = 14296926, upload-time = "2025-09-09T15:58:00.035Z" }, - { url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421", size = 16638958, upload-time = "2025-09-09T15:58:02.738Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021", size = 16071920, upload-time = "2025-09-09T15:58:05.029Z" }, - { url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf", size = 18577076, upload-time = "2025-09-09T15:58:07.745Z" }, - { url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0", size = 6366952, upload-time = "2025-09-09T15:58:10.096Z" }, - { url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8", size = 12919322, upload-time = "2025-09-09T15:58:12.138Z" }, - { url = "https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe", size = 10478630, upload-time = "2025-09-09T15:58:14.64Z" }, - { url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00", size = 21047987, upload-time = "2025-09-09T15:58:16.889Z" }, - { url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a", size = 14301076, upload-time = "2025-09-09T15:58:20.343Z" }, - { url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d", size = 5229491, upload-time = "2025-09-09T15:58:22.481Z" }, - { url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a", size = 6737913, upload-time = "2025-09-09T15:58:24.569Z" }, - { url = "https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54", size = 14352811, upload-time = "2025-09-09T15:58:26.416Z" }, - { url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e", size = 16702689, upload-time = "2025-09-09T15:58:28.831Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097", size = 16133855, upload-time = "2025-09-09T15:58:31.349Z" }, - { url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970", size = 18652520, upload-time = "2025-09-09T15:58:33.762Z" }, - { url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5", size = 6515371, upload-time = "2025-09-09T15:58:36.04Z" }, - { url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f", size = 13112576, upload-time = "2025-09-09T15:58:37.927Z" }, - { url = "https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b", size = 10545953, upload-time = "2025-09-09T15:58:40.576Z" }, - { url = "https://files.pythonhosted.org/packages/b8/f2/7e0a37cfced2644c9563c529f29fa28acbd0960dde32ece683aafa6f4949/numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e", size = 21131019, upload-time = "2025-09-09T15:58:42.838Z" }, - { url = "https://files.pythonhosted.org/packages/1a/7e/3291f505297ed63831135a6cc0f474da0c868a1f31b0dd9a9f03a7a0d2ed/numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150", size = 14376288, upload-time = "2025-09-09T15:58:45.425Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4b/ae02e985bdeee73d7b5abdefeb98aef1207e96d4c0621ee0cf228ddfac3c/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3", size = 5305425, upload-time = "2025-09-09T15:58:48.6Z" }, - { url = "https://files.pythonhosted.org/packages/8b/eb/9df215d6d7250db32007941500dc51c48190be25f2401d5b2b564e467247/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0", size = 6819053, upload-time = "2025-09-09T15:58:50.401Z" }, - { url = "https://files.pythonhosted.org/packages/57/62/208293d7d6b2a8998a4a1f23ac758648c3c32182d4ce4346062018362e29/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e", size = 14420354, upload-time = "2025-09-09T15:58:52.704Z" }, - { url = "https://files.pythonhosted.org/packages/ed/0c/8e86e0ff7072e14a71b4c6af63175e40d1e7e933ce9b9e9f765a95b4e0c3/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db", size = 16760413, upload-time = "2025-09-09T15:58:55.027Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/11/0cc63f9f321ccf63886ac203336777140011fb669e739da36d8db3c53b98/numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc", size = 12971844, upload-time = "2025-09-09T15:58:57.359Z" }, + { url = "https://files.pythonhosted.org/packages/60/e7/0e07379944aa8afb49a556a2b54587b828eb41dc9adc56fb7615b678ca53/numpy-2.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e78aecd2800b32e8347ce49316d3eaf04aed849cd5b38e0af39f829a4e59f5eb", size = 21259519, upload-time = "2025-10-15T16:15:19.012Z" }, + { url = "https://files.pythonhosted.org/packages/d0/cb/5a69293561e8819b09e34ed9e873b9a82b5f2ade23dce4c51dc507f6cfe1/numpy-2.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd09cc5d65bda1e79432859c40978010622112e9194e581e3415a3eccc7f43f", size = 14452796, upload-time = "2025-10-15T16:15:23.094Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/ff11611200acd602a1e5129e36cfd25bf01ad8e5cf927baf2e90236eb02e/numpy-2.3.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1b219560ae2c1de48ead517d085bc2d05b9433f8e49d0955c82e8cd37bd7bf36", size = 5381639, upload-time = "2025-10-15T16:15:25.572Z" }, + { url = "https://files.pythonhosted.org/packages/ea/77/e95c757a6fe7a48d28a009267408e8aa382630cc1ad1db7451b3bc21dbb4/numpy-2.3.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:bafa7d87d4c99752d07815ed7a2c0964f8ab311eb8168f41b910bd01d15b6032", size = 6914296, upload-time = "2025-10-15T16:15:27.079Z" }, + { url = "https://files.pythonhosted.org/packages/a3/d2/137c7b6841c942124eae921279e5c41b1c34bab0e6fc60c7348e69afd165/numpy-2.3.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36dc13af226aeab72b7abad501d370d606326a0029b9f435eacb3b8c94b8a8b7", size = 14591904, upload-time = "2025-10-15T16:15:29.044Z" }, + { url = "https://files.pythonhosted.org/packages/bb/32/67e3b0f07b0aba57a078c4ab777a9e8e6bc62f24fb53a2337f75f9691699/numpy-2.3.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7b2f9a18b5ff9824a6af80de4f37f4ec3c2aab05ef08f51c77a093f5b89adda", size = 16939602, upload-time = "2025-10-15T16:15:31.106Z" }, + { url = "https://files.pythonhosted.org/packages/95/22/9639c30e32c93c4cee3ccdb4b09c2d0fbff4dcd06d36b357da06146530fb/numpy-2.3.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9984bd645a8db6ca15d850ff996856d8762c51a2239225288f08f9050ca240a0", size = 16372661, upload-time = "2025-10-15T16:15:33.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/e9/a685079529be2b0156ae0c11b13d6be647743095bb51d46589e95be88086/numpy-2.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:64c5825affc76942973a70acf438a8ab618dbd692b84cd5ec40a0a0509edc09a", size = 18884682, upload-time = "2025-10-15T16:15:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/cf/85/f6f00d019b0cc741e64b4e00ce865a57b6bed945d1bbeb1ccadbc647959b/numpy-2.3.4-cp311-cp311-win32.whl", hash = "sha256:ed759bf7a70342f7817d88376eb7142fab9fef8320d6019ef87fae05a99874e1", size = 6570076, upload-time = "2025-10-15T16:15:38.225Z" }, + { url = "https://files.pythonhosted.org/packages/7d/10/f8850982021cb90e2ec31990291f9e830ce7d94eef432b15066e7cbe0bec/numpy-2.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:faba246fb30ea2a526c2e9645f61612341de1a83fb1e0c5edf4ddda5a9c10996", size = 13089358, upload-time = "2025-10-15T16:15:40.404Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/ad/afdd8351385edf0b3445f9e24210a9c3971ef4de8fd85155462fc4321d79/numpy-2.3.4-cp311-cp311-win_arm64.whl", hash = "sha256:4c01835e718bcebe80394fd0ac66c07cbb90147ebbdad3dcecd3f25de2ae7e2c", size = 10462292, upload-time = "2025-10-15T16:15:42.896Z" }, + { url = "https://files.pythonhosted.org/packages/96/7a/02420400b736f84317e759291b8edaeee9dc921f72b045475a9cbdb26b17/numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11", size = 20957727, upload-time = "2025-10-15T16:15:44.9Z" }, + { url = "https://files.pythonhosted.org/packages/18/90/a014805d627aa5750f6f0e878172afb6454552da929144b3c07fcae1bb13/numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9", size = 14187262, upload-time = "2025-10-15T16:15:47.761Z" }, + { url = "https://files.pythonhosted.org/packages/c7/e4/0a94b09abe89e500dc748e7515f21a13e30c5c3fe3396e6d4ac108c25fca/numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667", size = 5115992, upload-time = "2025-10-15T16:15:50.144Z" }, + { url = "https://files.pythonhosted.org/packages/88/dd/db77c75b055c6157cbd4f9c92c4458daef0dd9cbe6d8d2fe7f803cb64c37/numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef", size = 6648672, upload-time = "2025-10-15T16:15:52.442Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e6/e31b0d713719610e406c0ea3ae0d90760465b086da8783e2fd835ad59027/numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e", size = 14284156, upload-time = "2025-10-15T16:15:54.351Z" }, + { url = "https://files.pythonhosted.org/packages/f9/58/30a85127bfee6f108282107caf8e06a1f0cc997cb6b52cdee699276fcce4/numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a", size = 16641271, upload-time = "2025-10-15T16:15:56.67Z" }, + { url = "https://files.pythonhosted.org/packages/06/f2/2e06a0f2adf23e3ae29283ad96959267938d0efd20a2e25353b70065bfec/numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16", size = 16059531, upload-time = "2025-10-15T16:15:59.412Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e7/b106253c7c0d5dc352b9c8fab91afd76a93950998167fa3e5afe4ef3a18f/numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786", size = 18578983, upload-time = "2025-10-15T16:16:01.804Z" }, + { url = "https://files.pythonhosted.org/packages/73/e3/04ecc41e71462276ee867ccbef26a4448638eadecf1bc56772c9ed6d0255/numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc", size = 6291380, upload-time = "2025-10-15T16:16:03.938Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a8/566578b10d8d0e9955b1b6cd5db4e9d4592dd0026a941ff7994cedda030a/numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32", size = 12787999, upload-time = "2025-10-15T16:16:05.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/22/9c903a957d0a8071b607f5b1bff0761d6e608b9a965945411f867d515db1/numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db", size = 10197412, upload-time = "2025-10-15T16:16:07.854Z" }, + { url = "https://files.pythonhosted.org/packages/57/7e/b72610cc91edf138bc588df5150957a4937221ca6058b825b4725c27be62/numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966", size = 20950335, upload-time = "2025-10-15T16:16:10.304Z" }, + { url = "https://files.pythonhosted.org/packages/3e/46/bdd3370dcea2f95ef14af79dbf81e6927102ddf1cc54adc0024d61252fd9/numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3", size = 14179878, upload-time = "2025-10-15T16:16:12.595Z" }, + { url = "https://files.pythonhosted.org/packages/ac/01/5a67cb785bda60f45415d09c2bc245433f1c68dd82eef9c9002c508b5a65/numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197", size = 5108673, upload-time = "2025-10-15T16:16:14.877Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cd/8428e23a9fcebd33988f4cb61208fda832800ca03781f471f3727a820704/numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e", size = 6641438, upload-time = "2025-10-15T16:16:16.805Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d1/913fe563820f3c6b079f992458f7331278dcd7ba8427e8e745af37ddb44f/numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7", size = 14281290, upload-time = "2025-10-15T16:16:18.764Z" }, + { url = "https://files.pythonhosted.org/packages/9e/7e/7d306ff7cb143e6d975cfa7eb98a93e73495c4deabb7d1b5ecf09ea0fd69/numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953", size = 16636543, upload-time = "2025-10-15T16:16:21.072Z" }, + { url = "https://files.pythonhosted.org/packages/47/6a/8cfc486237e56ccfb0db234945552a557ca266f022d281a2f577b98e955c/numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37", size = 16056117, upload-time = "2025-10-15T16:16:23.369Z" }, + { url = "https://files.pythonhosted.org/packages/b1/0e/42cb5e69ea901e06ce24bfcc4b5664a56f950a70efdcf221f30d9615f3f3/numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd", size = 18577788, upload-time = "2025-10-15T16:16:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/86/92/41c3d5157d3177559ef0a35da50f0cda7fa071f4ba2306dd36818591a5bc/numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646", size = 6282620, upload-time = "2025-10-15T16:16:29.811Z" }, + { url = "https://files.pythonhosted.org/packages/09/97/fd421e8bc50766665ad35536c2bb4ef916533ba1fdd053a62d96cc7c8b95/numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d", size = 12784672, upload-time = "2025-10-15T16:16:31.589Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/df/5474fb2f74970ca8eb978093969b125a84cc3d30e47f82191f981f13a8a0/numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc", size = 10196702, upload-time = "2025-10-15T16:16:33.902Z" }, + { url = "https://files.pythonhosted.org/packages/11/83/66ac031464ec1767ea3ed48ce40f615eb441072945e98693bec0bcd056cc/numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879", size = 21049003, upload-time = "2025-10-15T16:16:36.101Z" }, + { url = "https://files.pythonhosted.org/packages/5f/99/5b14e0e686e61371659a1d5bebd04596b1d72227ce36eed121bb0aeab798/numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562", size = 14302980, upload-time = "2025-10-15T16:16:39.124Z" }, + { url = "https://files.pythonhosted.org/packages/2c/44/e9486649cd087d9fc6920e3fc3ac2aba10838d10804b1e179fb7cbc4e634/numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a", size = 5231472, upload-time = "2025-10-15T16:16:41.168Z" }, + { url = "https://files.pythonhosted.org/packages/3e/51/902b24fa8887e5fe2063fd61b1895a476d0bbf46811ab0c7fdf4bd127345/numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6", size = 6739342, upload-time = "2025-10-15T16:16:43.777Z" }, + { url = "https://files.pythonhosted.org/packages/34/f1/4de9586d05b1962acdcdb1dc4af6646361a643f8c864cef7c852bf509740/numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7", size = 14354338, upload-time = "2025-10-15T16:16:46.081Z" }, + { url = "https://files.pythonhosted.org/packages/1f/06/1c16103b425de7969d5a76bdf5ada0804b476fed05d5f9e17b777f1cbefd/numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0", size = 16702392, upload-time = "2025-10-15T16:16:48.455Z" }, + { url = "https://files.pythonhosted.org/packages/34/b2/65f4dc1b89b5322093572b6e55161bb42e3e0487067af73627f795cc9d47/numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f", size = 16134998, upload-time = "2025-10-15T16:16:51.114Z" }, + { url = "https://files.pythonhosted.org/packages/d4/11/94ec578896cdb973aaf56425d6c7f2aff4186a5c00fac15ff2ec46998b46/numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64", size = 18651574, upload-time = "2025-10-15T16:16:53.429Z" }, + { url = "https://files.pythonhosted.org/packages/62/b7/7efa763ab33dbccf56dade36938a77345ce8e8192d6b39e470ca25ff3cd0/numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb", size = 6413135, upload-time = "2025-10-15T16:16:55.992Z" }, + { url = "https://files.pythonhosted.org/packages/43/70/aba4c38e8400abcc2f345e13d972fb36c26409b3e644366db7649015f291/numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c", size = 12928582, upload-time = "2025-10-15T16:16:57.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/63/871fad5f0073fc00fbbdd7232962ea1ac40eeaae2bba66c76214f7954236/numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40", size = 10266691, upload-time = "2025-10-15T16:17:00.048Z" }, + { url = "https://files.pythonhosted.org/packages/72/71/ae6170143c115732470ae3a2d01512870dd16e0953f8a6dc89525696069b/numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e", size = 20955580, upload-time = "2025-10-15T16:17:02.509Z" }, + { url = "https://files.pythonhosted.org/packages/af/39/4be9222ffd6ca8a30eda033d5f753276a9c3426c397bb137d8e19dedd200/numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff", size = 14188056, upload-time = "2025-10-15T16:17:04.873Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3d/d85f6700d0a4aa4f9491030e1021c2b2b7421b2b38d01acd16734a2bfdc7/numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f", size = 5116555, upload-time = "2025-10-15T16:17:07.499Z" }, + { url = "https://files.pythonhosted.org/packages/bf/04/82c1467d86f47eee8a19a464c92f90a9bb68ccf14a54c5224d7031241ffb/numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b", size = 6643581, upload-time = "2025-10-15T16:17:09.774Z" }, + { url = "https://files.pythonhosted.org/packages/0c/d3/c79841741b837e293f48bd7db89d0ac7a4f2503b382b78a790ef1dc778a5/numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7", size = 14299186, upload-time = "2025-10-15T16:17:11.937Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7e/4a14a769741fbf237eec5a12a2cbc7a4c4e061852b6533bcb9e9a796c908/numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2", size = 16638601, upload-time = "2025-10-15T16:17:14.391Z" }, + { url = "https://files.pythonhosted.org/packages/93/87/1c1de269f002ff0a41173fe01dcc925f4ecff59264cd8f96cf3b60d12c9b/numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52", size = 16074219, upload-time = "2025-10-15T16:17:17.058Z" }, + { url = "https://files.pythonhosted.org/packages/cd/28/18f72ee77408e40a76d691001ae599e712ca2a47ddd2c4f695b16c65f077/numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26", size = 18576702, upload-time = "2025-10-15T16:17:19.379Z" }, + { url = "https://files.pythonhosted.org/packages/c3/76/95650169b465ececa8cf4b2e8f6df255d4bf662775e797ade2025cc51ae6/numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc", size = 6337136, upload-time = "2025-10-15T16:17:22.886Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/a231a5c43ede5d6f77ba4a91e915a87dea4aeea76560ba4d2bf185c683f0/numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9", size = 12920542, upload-time = "2025-10-15T16:17:24.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/0c/ae9434a888f717c5ed2ff2393b3f344f0ff6f1c793519fa0c540461dc530/numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868", size = 10480213, upload-time = "2025-10-15T16:17:26.935Z" }, + { url = "https://files.pythonhosted.org/packages/83/4b/c4a5f0841f92536f6b9592694a5b5f68c9ab37b775ff342649eadf9055d3/numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec", size = 21052280, upload-time = "2025-10-15T16:17:29.638Z" }, + { url = "https://files.pythonhosted.org/packages/3e/80/90308845fc93b984d2cc96d83e2324ce8ad1fd6efea81b324cba4b673854/numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3", size = 14302930, upload-time = "2025-10-15T16:17:32.384Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4e/07439f22f2a3b247cec4d63a713faae55e1141a36e77fb212881f7cda3fb/numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365", size = 5231504, upload-time = "2025-10-15T16:17:34.515Z" }, + { url = "https://files.pythonhosted.org/packages/ab/de/1e11f2547e2fe3d00482b19721855348b94ada8359aef5d40dd57bfae9df/numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252", size = 6739405, upload-time = "2025-10-15T16:17:36.128Z" }, + { url = "https://files.pythonhosted.org/packages/3b/40/8cd57393a26cebe2e923005db5134a946c62fa56a1087dc7c478f3e30837/numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e", size = 14354866, upload-time = "2025-10-15T16:17:38.884Z" }, + { url = "https://files.pythonhosted.org/packages/93/39/5b3510f023f96874ee6fea2e40dfa99313a00bf3ab779f3c92978f34aace/numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0", size = 16703296, upload-time = "2025-10-15T16:17:41.564Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/19bb163617c8045209c1996c4e427bccbc4bbff1e2c711f39203c8ddbb4a/numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0", size = 16136046, upload-time = "2025-10-15T16:17:43.901Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c1/6dba12fdf68b02a21ac411c9df19afa66bed2540f467150ca64d246b463d/numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f", size = 18652691, upload-time = "2025-10-15T16:17:46.247Z" }, + { url = "https://files.pythonhosted.org/packages/f8/73/f85056701dbbbb910c51d846c58d29fd46b30eecd2b6ba760fc8b8a1641b/numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d", size = 6485782, upload-time = "2025-10-15T16:17:48.872Z" }, + { url = "https://files.pythonhosted.org/packages/17/90/28fa6f9865181cb817c2471ee65678afa8a7e2a1fb16141473d5fa6bacc3/numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6", size = 13113301, upload-time = "2025-10-15T16:17:50.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/23/08c002201a8e7e1f9afba93b97deceb813252d9cfd0d3351caed123dcf97/numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29", size = 10547532, upload-time = "2025-10-15T16:17:53.48Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b6/64898f51a86ec88ca1257a59c1d7fd077b60082a119affefcdf1dd0df8ca/numpy-2.3.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6e274603039f924c0fe5cb73438fa9246699c78a6df1bd3decef9ae592ae1c05", size = 21131552, upload-time = "2025-10-15T16:17:55.845Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4c/f135dc6ebe2b6a3c77f4e4838fa63d350f85c99462012306ada1bd4bc460/numpy-2.3.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d149aee5c72176d9ddbc6803aef9c0f6d2ceeea7626574fc68518da5476fa346", size = 14377796, upload-time = "2025-10-15T16:17:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a4/f33f9c23fcc13dd8412fc8614559b5b797e0aba9d8e01dfa8bae10c84004/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:6d34ed9db9e6395bb6cd33286035f73a59b058169733a9db9f85e650b88df37e", size = 5306904, upload-time = "2025-10-15T16:18:00.596Z" }, + { url = "https://files.pythonhosted.org/packages/28/af/c44097f25f834360f9fb960fa082863e0bad14a42f36527b2a121abdec56/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:fdebe771ca06bb8d6abce84e51dca9f7921fe6ad34a0c914541b063e9a68928b", size = 6819682, upload-time = "2025-10-15T16:18:02.32Z" }, + { url = "https://files.pythonhosted.org/packages/c5/8c/cd283b54c3c2b77e188f63e23039844f56b23bba1712318288c13fe86baf/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e92defe6c08211eb77902253b14fe5b480ebc5112bc741fd5e9cd0608f847", size = 14422300, upload-time = "2025-10-15T16:18:04.271Z" }, + { url = "https://files.pythonhosted.org/packages/b0/f0/8404db5098d92446b3e3695cf41c6f0ecb703d701cb0b7566ee2177f2eee/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13b9062e4f5c7ee5c7e5be96f29ba71bc5a37fed3d1d77c37390ae00724d296d", size = 16760806, upload-time = "2025-10-15T16:18:06.668Z" }, + { url = "https://files.pythonhosted.org/packages/95/8e/2844c3959ce9a63acc7c8e50881133d86666f0420bcde695e115ced0920f/numpy-2.3.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:81b3a59793523e552c4a96109dde028aa4448ae06ccac5a76ff6532a85558a7f", size = 12973130, upload-time = "2025-10-15T16:18:09.397Z" }, ] [[package]] name = "openai" -version = "2.1.0" +version = "2.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1043,9 +1248,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/dd/4d4d46a06943e37c95b6e388237e1e38d1e9aab264ff070f86345d60b7a4/openai-2.1.0.tar.gz", hash = "sha256:47f3463a5047340a989b4c0cd5378054acfca966ff61a96553b22f098e3270a2", size = 572998, upload-time = "2025-10-02T20:43:15.385Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/24/d0b0f088c39fa75a73292aef24d7436fbc537f12bf6026c6a69a1bfdae6e/openai-2.4.0.tar.gz", hash = "sha256:97860859172b637ffb308433c207a371d4683586ed2b24b360cb4c08cf377d01", size = 591541, upload-time = "2025-10-16T15:14:05.163Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/83/88f64fc8f037885efa8a629d1215f5bc1f037453bab4d4f823b5533319eb/openai-2.1.0-py3-none-any.whl", hash = 
"sha256:33172e8c06a4576144ba4137a493807a9ca427421dcabc54ad3aa656daf757d3", size = 964939, upload-time = "2025-10-02T20:43:13.568Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f6/68a8bbb62c001e5580de6b89372ddab03c3484ac3e251c298a30da094f5e/openai-2.4.0-py3-none-any.whl", hash = "sha256:5099f4fbfa80e7e5785ba52402c580eadba21e6172c85df05455676605ad150f", size = 1003092, upload-time = "2025-10-16T15:14:02.826Z" }, ] [[package]] @@ -1125,6 +1330,61 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/28/01/d6b274a0635be0468d4dbd9cafe80c47105937a0d42434e805e67cd2ed8b/orjson-3.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:e8f6a7a27d7b7bec81bd5924163e9af03d49bbb63013f107b48eb5d16db711bc", size = 125985, upload-time = "2025-08-26T17:46:16.67Z" }, ] +[[package]] +name = "ormsgpack" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/f8/224c342c0e03e131aaa1a1f19aa2244e167001783a433f4eed10eedd834b/ormsgpack-1.11.0.tar.gz", hash = "sha256:7c9988e78fedba3292541eb3bb274fa63044ef4da2ddb47259ea70c05dee4206", size = 49357, upload-time = "2025-10-08T17:29:15.621Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/3d/6996193cb2babc47fc92456223bef7d141065357ad4204eccf313f47a7b3/ormsgpack-1.11.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:03d4e658dd6e1882a552ce1d13cc7b49157414e7d56a4091fbe7823225b08cba", size = 367965, upload-time = "2025-10-08T17:28:06.736Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/c83b805dd9caebb046f4ceeed3706d0902ed2dbbcf08b8464e89f2c52e05/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67eb913c2b703f0ed39607fc56e50724dd41f92ce080a586b4d6149eb3fe4", size = 195209, upload-time = "2025-10-08T17:28:08.395Z" }, + { url = "https://files.pythonhosted.org/packages/3a/17/427d9c4f77b120f0af01d7a71d8144771c9388c2a81f712048320e31353b/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e54175b92411f73a238e5653a998627f6660de3def37d9dd7213e0fd264ca56", size = 205868, upload-time = "2025-10-08T17:28:09.688Z" }, + { url = "https://files.pythonhosted.org/packages/82/32/a9ce218478bdbf3fee954159900e24b314ab3064f7b6a217ccb1e3464324/ormsgpack-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca2b197f4556e1823d1319869d4c5dc278be335286d2308b0ed88b59a5afcc25", size = 207391, upload-time = "2025-10-08T17:28:11.031Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d3/4413fe7454711596fdf08adabdfa686580e4656702015108e4975f00a022/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc62388262f58c792fe1e450e1d9dbcc174ed2fb0b43db1675dd7c5ff2319d6a", size = 377078, upload-time = "2025-10-08T17:28:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ad/13fae555a45e35ca1ca929a27c9ee0a3ecada931b9d44454658c543f9b9c/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c48bc10af74adfbc9113f3fb160dc07c61ad9239ef264c17e449eba3de343dc2", size = 470776, upload-time = "2025-10-08T17:28:13.484Z" }, + { url = "https://files.pythonhosted.org/packages/36/60/51178b093ffc4e2ef3381013a67223e7d56224434fba80047249f4a84b26/ormsgpack-1.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a608d3a1d4fa4acdc5082168a54513cff91f47764cef435e81a483452f5f7647", size = 380862, upload-time = "2025-10-08T17:28:14.747Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/e3/1cb6c161335e2ae7d711ecfb007a31a3936603626e347c13e5e53b7c7cf8/ormsgpack-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:97217b4f7f599ba45916b9c4c4b1d5656e8e2a4d91e2e191d72a7569d3c30923", size = 112058, upload-time = "2025-10-08T17:28:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/90164d00e8e94b48eff8a17bc2f4be6b71ae356a00904bc69d5e8afe80fb/ormsgpack-1.11.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c7be823f47d8e36648d4bc90634b93f02b7d7cc7480081195f34767e86f181fb", size = 367964, upload-time = "2025-10-08T17:28:16.778Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c2/fb6331e880a3446c1341e72c77bd5a46da3e92a8e2edf7ea84a4c6c14fff/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68accf15d1b013812755c0eb7a30e1fc2f81eb603a1a143bf0cda1b301cfa797", size = 195209, upload-time = "2025-10-08T17:28:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/50/4943fb5df8cc02da6b7b1ee2c2a7fb13aebc9f963d69280b1bb02b1fb178/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:805d06fb277d9a4e503c0c707545b49cde66cbb2f84e5cf7c58d81dfc20d8658", size = 205869, upload-time = "2025-10-08T17:28:19.01Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/e7e06835bfea9adeef43915143ce818098aecab0cbd3df584815adf3e399/ormsgpack-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1e57cdf003e77acc43643bda151dc01f97147a64b11cdee1380bb9698a7601c", size = 207391, upload-time = "2025-10-08T17:28:20.352Z" }, + { url = "https://files.pythonhosted.org/packages/33/f0/f28a19e938a14ec223396e94f4782fbcc023f8c91f2ab6881839d3550f32/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:37fc05bdaabd994097c62e2f3e08f66b03f856a640ede6dc5ea340bd15b77f4d", size = 377081, upload-time = "2025-10-08T17:28:21.926Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e3/73d1d7287637401b0b6637e30ba9121e1aa1d9f5ea185ed9834ca15d512c/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a6e9db6c73eb46b2e4d97bdffd1368a66f54e6806b563a997b19c004ef165e1d", size = 470779, upload-time = "2025-10-08T17:28:22.993Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/7ba7f9721e766dd0dfe4cedf444439447212abffe2d2f4538edeeec8ccbd/ormsgpack-1.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9c44eae5ac0196ffc8b5ed497c75511056508f2303fa4d36b208eb820cf209e", size = 380865, upload-time = "2025-10-08T17:28:24.012Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7d/bb92a0782bbe0626c072c0320001410cf3f6743ede7dc18f034b1a18edef/ormsgpack-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:11d0dfaf40ae7c6de4f7dbd1e4892e2e6a55d911ab1774357c481158d17371e4", size = 112058, upload-time = "2025-10-08T17:28:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/28/1a/f07c6f74142815d67e1d9d98c5b2960007100408ade8242edac96d5d1c73/ormsgpack-1.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:0c63a3f7199a3099c90398a1bdf0cb577b06651a442dc5efe67f2882665e5b02", size = 105894, upload-time = "2025-10-08T17:28:25.93Z" }, + { url = "https://files.pythonhosted.org/packages/1e/16/2805ebfb3d2cbb6c661b5fae053960fc90a2611d0d93e2207e753e836117/ormsgpack-1.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3434d0c8d67de27d9010222de07fb6810fb9af3bb7372354ffa19257ac0eb83b", size = 368474, upload-time = 
"2025-10-08T17:28:27.532Z" }, + { url = "https://files.pythonhosted.org/packages/6f/39/6afae47822dca0ce4465d894c0bbb860a850ce29c157882dbdf77a5dd26e/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2da5bd097e8dbfa4eb0d4ccfe79acd6f538dee4493579e2debfe4fc8f4ca89b", size = 195321, upload-time = "2025-10-08T17:28:28.573Z" }, + { url = "https://files.pythonhosted.org/packages/f6/54/11eda6b59f696d2f16de469bfbe539c9f469c4b9eef5a513996b5879c6e9/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fdbaa0a5a8606a486960b60c24f2d5235d30ac7a8b98eeaea9854bffef14dc3d", size = 206036, upload-time = "2025-10-08T17:28:29.785Z" }, + { url = "https://files.pythonhosted.org/packages/1e/86/890430f704f84c4699ddad61c595d171ea2fd77a51fbc106f83981e83939/ormsgpack-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3682f24f800c1837017ee90ce321086b2cbaef88db7d4cdbbda1582aa6508159", size = 207615, upload-time = "2025-10-08T17:28:31.076Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b9/77383e16c991c0ecb772205b966fc68d9c519e0b5f9c3913283cbed30ffe/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fcca21202bb05ccbf3e0e92f560ee59b9331182e4c09c965a28155efbb134993", size = 377195, upload-time = "2025-10-08T17:28:32.436Z" }, + { url = "https://files.pythonhosted.org/packages/20/e2/15f9f045d4947f3c8a5e0535259fddf027b17b1215367488b3565c573b9d/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c30e5c4655ba46152d722ec7468e8302195e6db362ec1ae2c206bc64f6030e43", size = 470960, upload-time = "2025-10-08T17:28:33.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/403ce188c4c495bc99dff921a0ad3d9d352dd6d3c4b629f3638b7f0cf79b/ormsgpack-1.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7138a341f9e2c08c59368f03d3be25e8b87b3baaf10d30fb1f6f6b52f3d47944", size = 381174, upload-time = "2025-10-08T17:28:34.781Z" }, + { url = "https://files.pythonhosted.org/packages/14/a8/94c94bc48c68da4374870a851eea03fc5a45eb041182ad4c5ed9acfc05a4/ormsgpack-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4bd8589b78a11026d47f4edf13c1ceab9088bb12451f34396afe6497db28a27", size = 112314, upload-time = "2025-10-08T17:28:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/19/d0/aa4cf04f04e4cc180ce7a8d8ddb5a7f3af883329cbc59645d94d3ba157a5/ormsgpack-1.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:e5e746a1223e70f111d4001dab9585ac8639eee8979ca0c8db37f646bf2961da", size = 106072, upload-time = "2025-10-08T17:28:37.518Z" }, + { url = "https://files.pythonhosted.org/packages/8b/35/e34722edb701d053cf2240f55974f17b7dbfd11fdef72bd2f1835bcebf26/ormsgpack-1.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e7b36ab7b45cb95217ae1f05f1318b14a3e5ef73cb00804c0f06233f81a14e8", size = 368502, upload-time = "2025-10-08T17:28:38.547Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6a/c2fc369a79d6aba2aa28c8763856c95337ac7fcc0b2742185cd19397212a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43402d67e03a9a35cc147c8c03f0c377cad016624479e1ee5b879b8425551484", size = 195344, upload-time = "2025-10-08T17:28:39.554Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6a/0f8e24b7489885534c1a93bdba7c7c434b9b8638713a68098867db9f254c/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:64fd992f932764d6306b70ddc755c1bc3405c4c6a69f77a36acf7af1c8f5ada4", size = 206045, upload-time = "2025-10-08T17:28:40.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/71/8b460ba264f3c6f82ef5b1920335720094e2bd943057964ce5287d6df83a/ormsgpack-1.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0362fb7fe4a29c046c8ea799303079a09372653a1ce5a5a588f3bbb8088368d0", size = 207641, upload-time = "2025-10-08T17:28:41.736Z" }, + { url = "https://files.pythonhosted.org/packages/50/cf/f369446abaf65972424ed2651f2df2b7b5c3b735c93fc7fa6cfb81e34419/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:de2f7a65a9d178ed57be49eba3d0fc9b833c32beaa19dbd4ba56014d3c20b152", size = 377211, upload-time = "2025-10-08T17:28:43.12Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3f/948bb0047ce0f37c2efc3b9bb2bcfdccc61c63e0b9ce8088d4903ba39dcf/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f38cfae95461466055af966fc922d06db4e1654966385cda2828653096db34da", size = 470973, upload-time = "2025-10-08T17:28:44.465Z" }, + { url = "https://files.pythonhosted.org/packages/31/a4/92a8114d1d017c14aaa403445060f345df9130ca532d538094f38e535988/ormsgpack-1.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c88396189d238f183cea7831b07a305ab5c90d6d29b53288ae11200bd956357b", size = 381161, upload-time = "2025-10-08T17:28:46.063Z" }, + { url = "https://files.pythonhosted.org/packages/d0/64/5b76447da654798bfcfdfd64ea29447ff2b7f33fe19d0e911a83ad5107fc/ormsgpack-1.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:5403d1a945dd7c81044cebeca3f00a28a0f4248b33242a5d2d82111628043725", size = 112321, upload-time = "2025-10-08T17:28:47.393Z" }, + { url = "https://files.pythonhosted.org/packages/46/5e/89900d06db9ab81e7ec1fd56a07c62dfbdcda398c435718f4252e1dc52a0/ormsgpack-1.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c57357b8d43b49722b876edf317bdad9e6d52071b523fdd7394c30cd1c67d5a0", size = 106084, upload-time = "2025-10-08T17:28:48.305Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0b/c659e8657085c8c13f6a0224789f422620cef506e26573b5434defe68483/ormsgpack-1.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d390907d90fd0c908211592c485054d7a80990697ef4dff4e436ac18e1aab98a", size = 368497, upload-time = "2025-10-08T17:28:49.297Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0e/451e5848c7ed56bd287e8a2b5cb5926e54466f60936e05aec6cb299f9143/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6153c2e92e789509098e04c9aa116b16673bd88ec78fbe0031deeb34ab642d10", size = 195385, upload-time = "2025-10-08T17:28:50.314Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/90f78cbbe494959f2439c2ec571f08cd3464c05a6a380b0d621c622122a9/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2b2c2a065a94d742212b2018e1fecd8f8d72f3c50b53a97d1f407418093446d", size = 206114, upload-time = "2025-10-08T17:28:51.336Z" }, + { url = "https://files.pythonhosted.org/packages/fb/db/34163f4c0923bea32dafe42cd878dcc66795a3e85669bc4b01c1e2b92a7b/ormsgpack-1.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110e65b5340f3d7ef8b0009deae3c6b169437e6b43ad5a57fd1748085d29d2ac", size = 207679, upload-time = "2025-10-08T17:28:53.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/14/04ee741249b16f380a9b4a0cc19d4134d0b7c74bab27a2117da09e525eb9/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c27e186fca96ab34662723e65b420919910acbbc50fc8e1a44e08f26268cb0e0", size = 377237, upload-time = "2025-10-08T17:28:56.12Z" }, + { url = "https://files.pythonhosted.org/packages/89/ff/53e588a6aaa833237471caec679582c2950f0e7e1a8ba28c1511b465c1f4/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d56b1f877c13d499052d37a3db2378a97d5e1588d264f5040b3412aee23d742c", size = 471021, upload-time = "2025-10-08T17:28:57.299Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f9/f20a6d9ef2be04da3aad05e8f5699957e9a30c6d5c043a10a296afa7e890/ormsgpack-1.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c88e28cd567c0a3269f624b4ade28142d5e502c8e826115093c572007af5be0a", size = 381205, upload-time = "2025-10-08T17:28:58.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/64/96c07d084b479ac8b7821a77ffc8d3f29d8b5c95ebfdf8db1c03dff02762/ormsgpack-1.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:8811160573dc0a65f62f7e0792c4ca6b7108dfa50771edb93f9b84e2d45a08ae", size = 112374, upload-time = "2025-10-08T17:29:00Z" }, + { url = "https://files.pythonhosted.org/packages/88/a5/5dcc18b818d50213a3cadfe336bb6163a102677d9ce87f3d2f1a1bee0f8c/ormsgpack-1.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:23e30a8d3c17484cf74e75e6134322255bd08bc2b5b295cc9c442f4bae5f3c2d", size = 106056, upload-time = "2025-10-08T17:29:01.29Z" }, + { url = "https://files.pythonhosted.org/packages/19/2b/776d1b411d2be50f77a6e6e94a25825cca55dcacfe7415fd691a144db71b/ormsgpack-1.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2905816502adfaf8386a01dd85f936cd378d243f4f5ee2ff46f67f6298dc90d5", size = 368661, upload-time = "2025-10-08T17:29:02.382Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0c/81a19e6115b15764db3d241788f9fac093122878aaabf872cc545b0c4650/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c04402fb9a0a9b9f18fbafd6d5f8398ee99b3ec619fb63952d3a954bc9d47daa", size = 195539, upload-time = "2025-10-08T17:29:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/97/86/e5b50247a61caec5718122feb2719ea9d451d30ac0516c288c1dbc6408e8/ormsgpack-1.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a025ec07ac52056ecfd9e57b5cbc6fff163f62cb9805012b56cda599157f8ef2", size = 207718, upload-time = "2025-10-08T17:29:04.545Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -1145,61 +1405,104 @@ wheels = [ [[package]] name = "pillow" -version = "10.4.0" +version = "11.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059, upload-time = "2024-07-01T09:48:43.583Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/69/a31cccd538ca0b5272be2a38347f8839b97a14be104ea08b0db92f749c74/pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = 
"sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e", size = 3509271, upload-time = "2024-07-01T09:45:22.07Z" }, - { url = "https://files.pythonhosted.org/packages/9a/9e/4143b907be8ea0bce215f2ae4f7480027473f8b61fcedfda9d851082a5d2/pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d", size = 3375658, upload-time = "2024-07-01T09:45:25.292Z" }, - { url = "https://files.pythonhosted.org/packages/8a/25/1fc45761955f9359b1169aa75e241551e74ac01a09f487adaaf4c3472d11/pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856", size = 4332075, upload-time = "2024-07-01T09:45:27.94Z" }, - { url = "https://files.pythonhosted.org/packages/5e/dd/425b95d0151e1d6c951f45051112394f130df3da67363b6bc75dc4c27aba/pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f", size = 4444808, upload-time = "2024-07-01T09:45:30.305Z" }, - { url = "https://files.pythonhosted.org/packages/b1/84/9a15cc5726cbbfe7f9f90bfb11f5d028586595907cd093815ca6644932e3/pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b", size = 4356290, upload-time = "2024-07-01T09:45:32.868Z" }, - { url = "https://files.pythonhosted.org/packages/b5/5b/6651c288b08df3b8c1e2f8c1152201e0b25d240e22ddade0f1e242fc9fa0/pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc", size = 4525163, upload-time = "2024-07-01T09:45:35.279Z" }, - { url = "https://files.pythonhosted.org/packages/07/8b/34854bf11a83c248505c8cb0fcf8d3d0b459a2246c8809b967963b6b12ae/pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e", size = 4463100, upload-time = "2024-07-01T09:45:37.74Z" }, - { url = "https://files.pythonhosted.org/packages/78/63/0632aee4e82476d9cbe5200c0cdf9ba41ee04ed77887432845264d81116d/pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46", size = 4592880, upload-time = "2024-07-01T09:45:39.89Z" }, - { url = "https://files.pythonhosted.org/packages/df/56/b8663d7520671b4398b9d97e1ed9f583d4afcbefbda3c6188325e8c297bd/pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984", size = 2235218, upload-time = "2024-07-01T09:45:42.771Z" }, - { url = "https://files.pythonhosted.org/packages/f4/72/0203e94a91ddb4a9d5238434ae6c1ca10e610e8487036132ea9bf806ca2a/pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141", size = 2554487, upload-time = "2024-07-01T09:45:45.176Z" }, - { url = "https://files.pythonhosted.org/packages/bd/52/7e7e93d7a6e4290543f17dc6f7d3af4bd0b3dd9926e2e8a35ac2282bc5f4/pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1", size = 2243219, upload-time = "2024-07-01T09:45:47.274Z" }, - { url = "https://files.pythonhosted.org/packages/a7/62/c9449f9c3043c37f73e7487ec4ef0c03eb9c9afc91a92b977a67b3c0bbc5/pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c", size = 3509265, upload-time = 
"2024-07-01T09:45:49.812Z" }, - { url = "https://files.pythonhosted.org/packages/f4/5f/491dafc7bbf5a3cc1845dc0430872e8096eb9e2b6f8161509d124594ec2d/pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be", size = 3375655, upload-time = "2024-07-01T09:45:52.462Z" }, - { url = "https://files.pythonhosted.org/packages/73/d5/c4011a76f4207a3c151134cd22a1415741e42fa5ddecec7c0182887deb3d/pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3", size = 4340304, upload-time = "2024-07-01T09:45:55.006Z" }, - { url = "https://files.pythonhosted.org/packages/ac/10/c67e20445a707f7a610699bba4fe050583b688d8cd2d202572b257f46600/pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6", size = 4452804, upload-time = "2024-07-01T09:45:58.437Z" }, - { url = "https://files.pythonhosted.org/packages/a9/83/6523837906d1da2b269dee787e31df3b0acb12e3d08f024965a3e7f64665/pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe", size = 4365126, upload-time = "2024-07-01T09:46:00.713Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e5/8c68ff608a4203085158cff5cc2a3c534ec384536d9438c405ed6370d080/pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319", size = 4533541, upload-time = "2024-07-01T09:46:03.235Z" }, - { url = "https://files.pythonhosted.org/packages/f4/7c/01b8dbdca5bc6785573f4cee96e2358b0918b7b2c7b60d8b6f3abf87a070/pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d", size = 4471616, upload-time = "2024-07-01T09:46:05.356Z" }, - { url = "https://files.pythonhosted.org/packages/c8/57/2899b82394a35a0fbfd352e290945440e3b3785655a03365c0ca8279f351/pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", size = 4600802, upload-time = "2024-07-01T09:46:08.145Z" }, - { url = "https://files.pythonhosted.org/packages/4d/d7/a44f193d4c26e58ee5d2d9db3d4854b2cfb5b5e08d360a5e03fe987c0086/pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496", size = 2235213, upload-time = "2024-07-01T09:46:10.211Z" }, - { url = "https://files.pythonhosted.org/packages/c1/d0/5866318eec2b801cdb8c82abf190c8343d8a1cd8bf5a0c17444a6f268291/pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91", size = 2554498, upload-time = "2024-07-01T09:46:12.685Z" }, - { url = "https://files.pythonhosted.org/packages/d4/c8/310ac16ac2b97e902d9eb438688de0d961660a87703ad1561fd3dfbd2aa0/pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22", size = 2243219, upload-time = "2024-07-01T09:46:14.83Z" }, - { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350, upload-time = "2024-07-01T09:46:17.177Z" }, - { url = 
"https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980, upload-time = "2024-07-01T09:46:19.169Z" }, - { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799, upload-time = "2024-07-01T09:46:21.883Z" }, - { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973, upload-time = "2024-07-01T09:46:24.321Z" }, - { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054, upload-time = "2024-07-01T09:46:26.825Z" }, - { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484, upload-time = "2024-07-01T09:46:29.355Z" }, - { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375, upload-time = "2024-07-01T09:46:31.756Z" }, - { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773, upload-time = "2024-07-01T09:46:33.73Z" }, - { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690, upload-time = "2024-07-01T09:46:36.587Z" }, - { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951, upload-time = "2024-07-01T09:46:38.777Z" }, - { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427, upload-time = "2024-07-01T09:46:43.15Z" }, - { url = "https://files.pythonhosted.org/packages/c3/00/706cebe7c2c12a6318aabe5d354836f54adff7156fd9e1bd6c89f4ba0e98/pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", size = 3525685, upload-time = "2024-07-01T09:46:45.194Z" }, - { url = 
"https://files.pythonhosted.org/packages/cf/76/f658cbfa49405e5ecbfb9ba42d07074ad9792031267e782d409fd8fe7c69/pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", size = 3374883, upload-time = "2024-07-01T09:46:47.331Z" }, - { url = "https://files.pythonhosted.org/packages/46/2b/99c28c4379a85e65378211971c0b430d9c7234b1ec4d59b2668f6299e011/pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", size = 4339837, upload-time = "2024-07-01T09:46:49.647Z" }, - { url = "https://files.pythonhosted.org/packages/f1/74/b1ec314f624c0c43711fdf0d8076f82d9d802afd58f1d62c2a86878e8615/pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", size = 4455562, upload-time = "2024-07-01T09:46:51.811Z" }, - { url = "https://files.pythonhosted.org/packages/4a/2a/4b04157cb7b9c74372fa867096a1607e6fedad93a44deeff553ccd307868/pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", size = 4366761, upload-time = "2024-07-01T09:46:53.961Z" }, - { url = "https://files.pythonhosted.org/packages/ac/7b/8f1d815c1a6a268fe90481232c98dd0e5fa8c75e341a75f060037bd5ceae/pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", size = 4536767, upload-time = "2024-07-01T09:46:56.664Z" }, - { url = "https://files.pythonhosted.org/packages/e5/77/05fa64d1f45d12c22c314e7b97398ffb28ef2813a485465017b7978b3ce7/pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", size = 4477989, upload-time = "2024-07-01T09:46:58.977Z" }, - { url = "https://files.pythonhosted.org/packages/12/63/b0397cfc2caae05c3fb2f4ed1b4fc4fc878f0243510a7a6034ca59726494/pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", size = 4610255, upload-time = "2024-07-01T09:47:01.189Z" }, - { url = "https://files.pythonhosted.org/packages/7b/f9/cfaa5082ca9bc4a6de66ffe1c12c2d90bf09c309a5f52b27759a596900e7/pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", size = 2235603, upload-time = "2024-07-01T09:47:03.918Z" }, - { url = "https://files.pythonhosted.org/packages/01/6a/30ff0eef6e0c0e71e55ded56a38d4859bf9d3634a94a88743897b5f96936/pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", size = 2554972, upload-time = "2024-07-01T09:47:06.152Z" }, - { url = "https://files.pythonhosted.org/packages/48/2c/2e0a52890f269435eee38b21c8218e102c621fe8d8df8b9dd06fabf879ba/pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", size = 2243375, upload-time = "2024-07-01T09:47:09.065Z" }, - { url = "https://files.pythonhosted.org/packages/38/30/095d4f55f3a053392f75e2eae45eba3228452783bab3d9a920b951ac495c/pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4", size = 3493889, upload-time = "2024-07-01T09:48:04.815Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/e8/4ff79788803a5fcd5dc35efdc9386af153569853767bff74540725b45863/pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da", size = 3346160, upload-time = "2024-07-01T09:48:07.206Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ac/4184edd511b14f760c73f5bb8a5d6fd85c591c8aff7c2229677a355c4179/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026", size = 3435020, upload-time = "2024-07-01T09:48:09.66Z" }, - { url = "https://files.pythonhosted.org/packages/da/21/1749cd09160149c0a246a81d646e05f35041619ce76f6493d6a96e8d1103/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e", size = 3490539, upload-time = "2024-07-01T09:48:12.529Z" }, - { url = "https://files.pythonhosted.org/packages/b6/f5/f71fe1888b96083b3f6dfa0709101f61fc9e972c0c8d04e9d93ccef2a045/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5", size = 3476125, upload-time = "2024-07-01T09:48:14.891Z" }, - { url = "https://files.pythonhosted.org/packages/96/b9/c0362c54290a31866c3526848583a2f45a535aa9d725fd31e25d318c805f/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885", size = 3579373, upload-time = "2024-07-01T09:48:17.601Z" }, - { url = "https://files.pythonhosted.org/packages/52/3b/ce7a01026a7cf46e5452afa86f97a5e88ca97f562cafa76570178ab56d8d/pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", size = 2554661, upload-time = "2024-07-01T09:48:20.293Z" }, + { url = "https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860", size = 5316554, upload-time = "2025-07-01T09:13:39.342Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad", size = 4686548, upload-time = "2025-07-01T09:13:41.835Z" }, + { url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0", size = 5859742, upload-time = "2025-07-03T13:09:47.439Z" }, + { url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b", size = 7633087, upload-time = "2025-07-03T13:09:51.796Z" }, + { url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50", size = 5963350, upload-time = 
"2025-07-01T09:13:43.865Z" }, + { url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae", size = 6631840, upload-time = "2025-07-01T09:13:46.161Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9", size = 6074005, upload-time = "2025-07-01T09:13:47.829Z" }, + { url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e", size = 6708372, upload-time = "2025-07-01T09:13:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6", size = 6277090, upload-time = "2025-07-01T09:13:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f", size = 6985988, upload-time = "2025-07-01T09:13:55.699Z" }, + { url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f", size = 2422899, upload-time = "2025-07-01T09:13:57.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, + { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, + { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, + { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, + { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, + { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516, upload-time = "2025-07-01T09:14:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768, upload-time = "2025-07-01T09:14:11.921Z" }, + { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055, upload-time = "2025-07-01T09:14:13.623Z" }, + { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, + { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, + { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, + { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, + { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, + { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, + { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, + { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, + { url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, + { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, + { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474, upload-time = "2025-07-01T09:14:41.843Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038, upload-time = "2025-07-01T09:14:44.008Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407, upload-time = "2025-07-03T13:10:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094, upload-time = "2025-07-03T13:10:21.857Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503, upload-time = "2025-07-01T09:14:45.698Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574, upload-time = "2025-07-01T09:14:47.415Z" }, + { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060, upload-time = "2025-07-01T09:14:49.636Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407, upload-time = "2025-07-01T09:14:51.962Z" }, + { url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841, upload-time = "2025-07-01T09:14:54.142Z" }, + { url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450, upload-time = "2025-07-01T09:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055, upload-time = "2025-07-01T09:14:58.072Z" }, + { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110, upload-time = "2025-07-01T09:14:59.79Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547, upload-time = "2025-07-01T09:15:01.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554, upload-time = "2025-07-03T13:10:27.018Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132, upload-time = "2025-07-03T13:10:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001, upload-time = "2025-07-01T09:15:03.365Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814, upload-time = "2025-07-01T09:15:05.655Z" }, + { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124, upload-time = "2025-07-01T09:15:07.358Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186, upload-time = "2025-07-01T09:15:09.317Z" }, + { url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546, upload-time = "2025-07-01T09:15:11.311Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102, upload-time = "2025-07-01T09:15:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803, upload-time = "2025-07-01T09:15:15.695Z" }, + { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520, upload-time = "2025-07-01T09:15:17.429Z" }, + { url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116, upload-time = "2025-07-01T09:15:19.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597, upload-time = "2025-07-03T13:10:38.404Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246, upload-time = "2025-07-03T13:10:44.987Z" }, + { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336, upload-time = "2025-07-01T09:15:21.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699, upload-time = "2025-07-01T09:15:23.186Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789, upload-time = "2025-07-01T09:15:25.1Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386, upload-time = "2025-07-01T09:15:27.378Z" }, + { url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911, upload-time = "2025-07-01T09:15:29.294Z" }, + { url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383, upload-time = "2025-07-01T09:15:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385, upload-time = "2025-07-01T09:15:33.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129, upload-time = "2025-07-01T09:15:35.194Z" }, + { url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580, upload-time = "2025-07-01T09:15:37.114Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860, upload-time = "2025-07-03T13:10:50.248Z" }, + { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694, upload-time = "2025-07-03T13:10:56.432Z" }, + { url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888, upload-time = "2025-07-01T09:15:39.436Z" }, + { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330, upload-time = "2025-07-01T09:15:41.269Z" }, + { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089, upload-time = "2025-07-01T09:15:43.13Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206, upload-time = "2025-07-01T09:15:44.937Z" }, + { url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370, upload-time = "2025-07-01T09:15:46.673Z" }, + { url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500, upload-time = "2025-07-01T09:15:48.512Z" }, + { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967", size = 5270556, upload-time = "2025-07-01T09:16:09.961Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe", size = 4654625, upload-time = "2025-07-01T09:16:11.913Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c", size = 4874207, upload-time = "2025-07-03T13:11:10.201Z" }, + { url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25", size = 6583939, upload-time = "2025-07-03T13:11:15.68Z" }, + { url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27", size = 4957166, upload-time = "2025-07-01T09:16:13.74Z" }, + { url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a", size = 5581482, upload-time = "2025-07-01T09:16:16.107Z" }, + { url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f", size = 6984596, upload-time = "2025-07-01T09:16:18.07Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, + { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, + { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, + { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, + { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, ] [[package]] @@ -1213,91 +1516,116 @@ wheels = [ [[package]] name = "propcache" -version = "0.3.2" +version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", size = 73178, upload-time = "2025-06-09T22:53:40.126Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3", size = 43133, upload-time = "2025-06-09T22:53:41.965Z" }, - { url = "https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3", size = 43039, upload-time = "2025-06-09T22:53:43.268Z" }, - { url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e", size = 201903, upload-time = "2025-06-09T22:53:44.872Z" }, - { url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220", size = 213362, upload-time = "2025-06-09T22:53:46.707Z" }, - { url = "https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb", size = 210525, upload-time = "2025-06-09T22:53:48.547Z" }, - { url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614", size = 198283, upload-time = "2025-06-09T22:53:50.067Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50", size = 191872, upload-time = "2025-06-09T22:53:51.438Z" }, - { url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339", size = 199452, upload-time = "2025-06-09T22:53:53.229Z" }, - { url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0", size = 191567, upload-time = "2025-06-09T22:53:54.541Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2", size = 193015, upload-time = "2025-06-09T22:53:56.44Z" }, - { url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7", size = 204660, upload-time = "2025-06-09T22:53:57.839Z" }, - { url = "https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b", size = 206105, upload-time = "2025-06-09T22:53:59.638Z" }, - { url = "https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c", size = 196980, upload-time = "2025-06-09T22:54:01.071Z" }, - { url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70", size = 37679, upload-time = "2025-06-09T22:54:03.003Z" }, - { url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9", size = 41459, upload-time = "2025-06-09T22:54:04.134Z" }, - { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, - { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, - { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, - { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, - { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, - { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, - { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, - { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = 
"2025-06-09T22:54:25.539Z" }, - { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, - { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, - { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, - { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, 
upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, - { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, - { url = 
"https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, - { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, - { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, - { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, - { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, - { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, - { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, - { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = 
"2025-06-09T22:55:13.45Z" }, - { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, - { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, - { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, - { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, - { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, - { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, - { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, - { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, - { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 
268334, upload-time = "2025-06-09T22:55:28.747Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, - { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, - { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, - { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, - { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, - { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, - { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0e/934b541323035566a9af292dba85a195f7b78179114f2c6ebb24551118a9/propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db", size = 79534, upload-time = "2025-10-08T19:46:02.083Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8", size = 45526, upload-time = "2025-10-08T19:46:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925", size = 47263, upload-time = "2025-10-08T19:46:05.405Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/1b/39313ddad2bf9187a1432654c38249bab4562ef535ef07f5eb6eb04d0b1b/propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21", size = 201012, upload-time = "2025-10-08T19:46:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/5b/01/f1d0b57d136f294a142acf97f4ed58c8e5b974c21e543000968357115011/propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5", size = 209491, upload-time = "2025-10-08T19:46:08.909Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c8/038d909c61c5bb039070b3fb02ad5cccdb1dde0d714792e251cdb17c9c05/propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db", size = 215319, upload-time = "2025-10-08T19:46:10.7Z" }, + { url = "https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7", size = 196856, upload-time = "2025-10-08T19:46:12.003Z" }, + { url = "https://files.pythonhosted.org/packages/42/df/5615fec76aa561987a534759b3686008a288e73107faa49a8ae5795a9f7a/propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4", size = 193241, upload-time = "2025-10-08T19:46:13.495Z" }, + { url = "https://files.pythonhosted.org/packages/d5/21/62949eb3a7a54afe8327011c90aca7e03547787a88fb8bd9726806482fea/propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60", size = 190552, upload-time = "2025-10-08T19:46:14.938Z" }, + { url = "https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f", size = 200113, upload-time = "2025-10-08T19:46:16.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900", size = 200778, upload-time = "2025-10-08T19:46:18.023Z" }, + { url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c", size = 193047, upload-time = "2025-10-08T19:46:19.449Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb", size = 38093, upload-time = "2025-10-08T19:46:20.643Z" }, + { url = "https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37", size = 41638, 
upload-time = "2025-10-08T19:46:21.935Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581", size = 38229, upload-time = "2025-10-08T19:46:23.368Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, 
upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = 
"sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = 
"sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] @@ -1320,7 +1648,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1328,96 +1656,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/35/d319ed522433215526689bad428a94058b6dd12190ce7ddd78618ac14b28/pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd", size = 816358, upload-time = "2025-10-14T15:02:21.842Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/6c/98/468cb649f208a6f1279448e6e5247b37ae79cf5e4041186f1e2ef3d16345/pydantic-2.12.2-py3-none-any.whl", hash = "sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae", size = 460628, upload-time = "2025-10-14T15:02:19.623Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = 
"sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, 
upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = 
"2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, 
upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = 
"sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = 
"2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = 
"2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", 
size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, + { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size 
= 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = "https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, ] [[package]] @@ -1473,24 +1828,24 @@ wheels = [ [[package]] name = "pytest-codspeed" -version = "4.0.0" +version = "4.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, { name = "pytest" }, { name = "rich" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/64/13/4989d50a3d6de9fb91de23f3b6ffce7c704f23516d308138242325a7c857/pytest_codspeed-4.0.0.tar.gz", hash = "sha256:0e9af08ca93ad897b376771db92693a81aa8990eecc2a778740412e00a6f6eaf", size = 107630, upload-time = "2025-07-10T08:37:53.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/ae/5d89a787151868d3ebd813b24a13fe4ac7e60148cf1b3454d351c8b99f69/pytest_codspeed-4.1.1.tar.gz", hash = "sha256:9acc3394cc8aafd4543193254831d87de6be79accfdbd43475919fdaa2fc8d81", size = 113149, upload-time = "2025-10-07T16:30:02.709Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/c9/c7116338e04d1c6bb43277c5f938fa7e5eb1df54b4cc0c298a428995296b/pytest_codspeed-4.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2517731b20a6aa9fe61d04822b802e1637ee67fd865189485b384a9d5897117f", size = 230409, upload-time = "2025-07-10T08:37:40.83Z" }, - { url = "https://files.pythonhosted.org/packages/e6/00/c21b0e2863c967c8d4dfa5bebdc5f0f2a9d6ab1cc7a39e111faf70a5880d/pytest_codspeed-4.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e5076bb5119d4f8248822b5cd6b768f70a18c7e1a7fbcd96a99cd4a6430096e", size = 221135, upload-time = "2025-07-10T08:37:42.255Z" }, - { url = "https://files.pythonhosted.org/packages/7f/e7/16b0f347fd910f2cc50e858094c17744d640e5ae71926c2c0ad762ecb7ec/pytest_codspeed-4.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06b324acdfe2076a0c97a9d31e8645f820822d6f0e766c73426767ff887a9381", size = 230418, upload-time = "2025-07-10T08:37:43.602Z" }, - { url = "https://files.pythonhosted.org/packages/47/a7/2b3ac30e1e2b326abf370c8a6b4ed48a43d3a5491def7aaf67f7fbab5d6f/pytest_codspeed-4.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ebdac1a4d6138e1ca4f5391e7e3cafad6e3aa6d5660d1b243871b691bc1396c", size = 221131, upload-time = "2025-07-10T08:37:44.708Z" }, - { url = "https://files.pythonhosted.org/packages/11/e4/a9591949783cdea60d5f2a215d89c3e17af7b068f2613e38b1d46cb5b8e9/pytest_codspeed-4.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f3def79d4072867d038a33e7f35bc7fb1a2a75236a624b3a690c5540017cb38", size = 230601, upload-time = "2025-07-10T08:37:46.018Z" }, - { url = "https://files.pythonhosted.org/packages/16/fe/22caa7cfb6717d21ba14ffd3c0b013b2143a4c32225715f401489f6c32bc/pytest_codspeed-4.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01d29d4538c2d111c0034f71811bcce577304506d22af4dd65df87fadf3ab495", size = 221230, upload-time = "2025-07-10T08:37:46.997Z" }, - { url = "https://files.pythonhosted.org/packages/55/e2/0a2e703301f7560a456e343e1b31d01a2ddee96807db5ded65951bfa5b7a/pytest_codspeed-4.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90894c93c9e23f12487b7fdf16c28da8f6275d565056772072beb41a72a54cf9", size = 230591, upload-time = "2025-07-10T08:37:47.961Z" }, - { url = "https://files.pythonhosted.org/packages/17/fc/5fee0bcdada8ecb5a89088cd84af7e094652fc94bf414a96b49a874fd8be/pytest_codspeed-4.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:79e9c40852fa7fc76776db4f1d290eceaeee2d6c5d2dc95a66c7cc690d83889e", size = 221227, upload-time = "2025-07-10T08:37:49.113Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/e4/e3ddab5fd04febf6189d71bfa4ba2d7c05adaa7d692a6d6b1e8ed68de12d/pytest_codspeed-4.0.0-py3-none-any.whl", hash = "sha256:c5debd4b127dc1c507397a8304776f52cabbfa53aad6f51eae329a5489df1e06", size = 107084, upload-time = "2025-07-10T08:37:52.65Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b4/6e76539ef17e398c06464c6d92da1b8978b46180df5446cdb76e2293c254/pytest_codspeed-4.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa83a1a0aaeb6bdb9918a18294708eebe765a3b5a855adccf9213629d2a0d302", size = 261944, upload-time = "2025-10-07T16:29:50.024Z" }, + { url = "https://files.pythonhosted.org/packages/43/6a/ba8aca4a07bb8f19fdeba752c7276b35cb63ff743695b137c2f68a53cf21/pytest_codspeed-4.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e6fe213b2589ffe6f2189b3b21ca14717c9346b226e6028d2e2b4d4d7dac750f", size = 249258, upload-time = "2025-10-07T16:29:52.694Z" }, + { url = "https://files.pythonhosted.org/packages/5e/13/be5ab2167d7fb477ba86061dfb6dee3b6feb698ef7d35e8912ba0cce94d5/pytest_codspeed-4.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94b3bd5a71bfab4478e9a9b5058237cf2b34938570b43495093c2ea213175bd5", size = 261978, upload-time = "2025-10-07T16:29:53.671Z" }, + { url = "https://files.pythonhosted.org/packages/43/7f/60445f5e9bdaff4b9da1d7e4964e34d87bc160867aec2d99c1119e38b3f8/pytest_codspeed-4.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfc1efdbcc92fb4b4cbc8eaa8d7387664b063c17e025985ece4816100f1fff29", size = 249269, upload-time = "2025-10-07T16:29:54.907Z" }, + { url = "https://files.pythonhosted.org/packages/6e/bf/b1d78c982d575636b38688f284bfd6836f5f232d95cef661a6345a91cc7b/pytest_codspeed-4.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:506d446d2911e5188aca7be702c2850a9b8680a72ed241a633d7edaeef00ac13", size = 262202, upload-time = "2025-10-07T16:29:55.792Z" }, + { url = "https://files.pythonhosted.org/packages/45/23/9ace1fcd72a84d845f522e06badada7d85f6a5a4d4aed54509b51af87068/pytest_codspeed-4.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1773c74394c98317c6846e9eb60c352222c031bdf1ded109f5c35772a3ce6dc2", size = 249554, upload-time = "2025-10-07T16:29:56.681Z" }, + { url = "https://files.pythonhosted.org/packages/da/45/09fa57357d46e46de968554cd0a20e4a8b2a48971fec9564c215c67ebcde/pytest_codspeed-4.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f8af528f7f950cb745971fc1e9f59ebc52cc4c51a7eac7a931577fd55d21b94", size = 262209, upload-time = "2025-10-07T16:29:57.659Z" }, + { url = "https://files.pythonhosted.org/packages/65/e5/ac697d8e249be059d4bec9ebcb22a5626e0d18fd6944665b6d0edca3a508/pytest_codspeed-4.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:db8b2b71cabde1a7ae77a29a3ce67bcb852c28d5599b4eb7428fdb26cd067815", size = 249559, upload-time = "2025-10-07T16:29:58.58Z" }, + { url = "https://files.pythonhosted.org/packages/31/b0/6f0502ca6497e4c53f2f425b57854b759f29d186804a64d090d9ac2878ca/pytest_codspeed-4.1.1-py3-none-any.whl", hash = "sha256:a0a7aa318b09d87541f4f65db9cd473b53d4f1589598d883b238fe208ae2ac8b", size = 113601, upload-time = "2025-10-07T16:30:01.58Z" }, ] [[package]] @@ -1793,15 +2148,15 @@ 
wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "14.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, ] [[package]] @@ -1871,77 +2226,112 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.11.0" +version = "0.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = "sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, - { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, - { url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, - { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, - { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, - { url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, - { url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, - { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, - { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, - { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, - { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, - { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, - { url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, + { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, + { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, ] [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = 
"2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = 
"https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, - { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, - { url = 
"https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, ] [[package]] @@ -2135,103 +2525,248 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71", size = 32845, upload-time = "2025-10-02T14:33:51.573Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d", size = 30807, upload-time = "2025-10-02T14:33:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8", size = 193786, upload-time = "2025-10-02T14:33:54.272Z" }, + { url = "https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058", size = 212830, upload-time = "2025-10-02T14:33:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2", size = 211606, upload-time = "2025-10-02T14:33:57.133Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc", size = 444872, upload-time = "2025-10-02T14:33:58.446Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc", size = 193217, upload-time = "2025-10-02T14:33:59.724Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07", size = 210139, upload-time = "2025-10-02T14:34:02.041Z" }, + { url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4", size = 197669, upload-time = "2025-10-02T14:34:03.664Z" }, + { url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06", size = 210018, upload-time = "2025-10-02T14:34:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4", size = 413058, upload-time = "2025-10-02T14:34:06.925Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b", size = 190628, upload-time = "2025-10-02T14:34:08.669Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl", hash = "sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b", size = 30577, upload-time = "2025-10-02T14:34:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb", size = 31487, upload-time = "2025-10-02T14:34:11.618Z" }, + { url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d", size = 27863, upload-time = "2025-10-02T14:34:12.619Z" }, + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, 
upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = 
"2025-10-02T14:35:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", 
size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + [[package]] name = "yarl" -version = "1.20.1" +version = "1.22.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4", size = 132910, upload-time = "2025-06-10T00:42:31.108Z" }, - { url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a", size = 90644, upload-time = "2025-06-10T00:42:33.851Z" }, - { url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed", size = 89322, upload-time = "2025-06-10T00:42:35.688Z" }, - { url = "https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e", size = 323786, upload-time = "2025-06-10T00:42:37.817Z" }, - { url = "https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73", size = 319627, upload-time = "2025-06-10T00:42:39.937Z" }, - { url = 
"https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e", size = 339149, upload-time = "2025-06-10T00:42:42.627Z" }, - { url = "https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8", size = 333327, upload-time = "2025-06-10T00:42:44.842Z" }, - { url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23", size = 326054, upload-time = "2025-06-10T00:42:47.149Z" }, - { url = "https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70", size = 315035, upload-time = "2025-06-10T00:42:48.852Z" }, - { url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb", size = 338962, upload-time = "2025-06-10T00:42:51.024Z" }, - { url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2", size = 335399, upload-time = "2025-06-10T00:42:53.007Z" }, - { url = "https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30", size = 338649, upload-time = "2025-06-10T00:42:54.964Z" }, - { url = "https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309", size = 358563, upload-time = "2025-06-10T00:42:57.28Z" }, - { url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24", size = 357609, upload-time = "2025-06-10T00:42:59.055Z" }, - { url = "https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13", size = 350224, upload-time = "2025-06-10T00:43:01.248Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8", size = 81753, upload-time = "2025-06-10T00:43:03.486Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16", size = 86817, upload-time = "2025-06-10T00:43:05.231Z" }, - { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, - { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, - { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, - { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, - { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, - { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, - { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, - { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, - { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = 
"2025-06-10T00:43:28.96Z" }, - { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, - { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, - { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, upload-time = "2025-06-10T00:43:36.489Z" }, - { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, - { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, - { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, - { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, - { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, - { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, - { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, - { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, - { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, - { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, - { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, - { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, - { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, - { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, - { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, - { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = 
"2025-06-10T00:44:12.834Z" }, - { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, - { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, - { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, - { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, - { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, - { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, - { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, - { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, - { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, - { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, - { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, 
upload-time = "2025-06-10T00:44:34.494Z" }, - { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, - { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, - { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, - { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, - { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, - { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, - { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, - { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, - { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, - { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, - { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, - { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, - { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, - { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, - { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, - { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, - { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, - { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, - { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, - { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, 
upload-time = "2025-06-10T00:45:20.677Z" }, - { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, - { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, - { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, - { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, + { url = "https://files.pythonhosted.org/packages/d1/43/a2204825342f37c337f5edb6637040fa14e365b2fcc2346960201d457579/yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e", size = 140517, upload-time = "2025-10-06T14:08:42.494Z" }, + { url = "https://files.pythonhosted.org/packages/44/6f/674f3e6f02266428c56f704cd2501c22f78e8b2eeb23f153117cc86fb28a/yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f", size = 93495, upload-time = "2025-10-06T14:08:46.2Z" }, + { url = "https://files.pythonhosted.org/packages/b8/12/5b274d8a0f30c07b91b2f02cba69152600b47830fcfb465c108880fcee9c/yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf", size = 94400, upload-time = "2025-10-06T14:08:47.855Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7f/df1b6949b1fa1aa9ff6de6e2631876ad4b73c4437822026e85d8acb56bb1/yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a", size = 347545, upload-time = "2025-10-06T14:08:49.683Z" }, + { url = "https://files.pythonhosted.org/packages/84/09/f92ed93bd6cd77872ab6c3462df45ca45cd058d8f1d0c9b4f54c1704429f/yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c", size = 319598, upload-time = "2025-10-06T14:08:51.215Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/ac3f3feae7d522cf7ccec3d340bb0b2b61c56cb9767923df62a135092c6b/yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147", size = 363893, upload-time = "2025-10-06T14:08:53.144Z" }, + { url = "https://files.pythonhosted.org/packages/06/49/f3219097403b9c84a4d079b1d7bda62dd9b86d0d6e4428c02d46ab2c77fc/yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb", size = 371240, upload-time = 
"2025-10-06T14:08:55.036Z" }, + { url = "https://files.pythonhosted.org/packages/35/9f/06b765d45c0e44e8ecf0fe15c9eacbbde342bb5b7561c46944f107bfb6c3/yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6", size = 346965, upload-time = "2025-10-06T14:08:56.722Z" }, + { url = "https://files.pythonhosted.org/packages/c5/69/599e7cea8d0fcb1694323b0db0dda317fa3162f7b90166faddecf532166f/yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0", size = 342026, upload-time = "2025-10-06T14:08:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/95/6f/9dfd12c8bc90fea9eab39832ee32ea48f8e53d1256252a77b710c065c89f/yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda", size = 335637, upload-time = "2025-10-06T14:09:00.506Z" }, + { url = "https://files.pythonhosted.org/packages/57/2e/34c5b4eb9b07e16e873db5b182c71e5f06f9b5af388cdaa97736d79dd9a6/yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc", size = 359082, upload-time = "2025-10-06T14:09:01.936Z" }, + { url = "https://files.pythonhosted.org/packages/31/71/fa7e10fb772d273aa1f096ecb8ab8594117822f683bab7d2c5a89914c92a/yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737", size = 357811, upload-time = "2025-10-06T14:09:03.445Z" }, + { url = "https://files.pythonhosted.org/packages/26/da/11374c04e8e1184a6a03cf9c8f5688d3e5cec83ed6f31ad3481b3207f709/yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467", size = 351223, upload-time = "2025-10-06T14:09:05.401Z" }, + { url = "https://files.pythonhosted.org/packages/82/8f/e2d01f161b0c034a30410e375e191a5d27608c1f8693bab1a08b089ca096/yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea", size = 82118, upload-time = "2025-10-06T14:09:11.148Z" }, + { url = "https://files.pythonhosted.org/packages/62/46/94c76196642dbeae634c7a61ba3da88cd77bed875bf6e4a8bed037505aa6/yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca", size = 86852, upload-time = "2025-10-06T14:09:12.958Z" }, + { url = "https://files.pythonhosted.org/packages/af/af/7df4f179d3b1a6dcb9a4bd2ffbc67642746fcafdb62580e66876ce83fff4/yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b", size = 82012, upload-time = "2025-10-06T14:09:14.664Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, + { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, + { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, + { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, + { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, + { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, + { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, + { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, + { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, + { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, + { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, + { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, + { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, + { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, + { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, + { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, + { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, + { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, + { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, + { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, + { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, + { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, + { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, + { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ] [[package]] diff --git a/libs/partners/perplexity/README.md b/libs/partners/perplexity/README.md index a60940042f3..0c79d42a17c 100644 --- a/libs/partners/perplexity/README.md +++ b/libs/partners/perplexity/README.md @@ -1,5 +1,22 @@ # langchain-perplexity +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-perplexity?label=%20)](https://pypi.org/project/langchain-perplexity/#history) +[![PyPI - 
License](https://img.shields.io/pypi/l/langchain-perplexity)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-perplexity)](https://pypistats.org/packages/langchain-perplexity) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-perplexity +``` + +## πŸ€” What is this? + This package contains the LangChain integration with Perplexity. +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/perplexity) for more details. diff --git a/libs/partners/perplexity/langchain_perplexity/chat_models.py b/libs/partners/perplexity/langchain_perplexity/chat_models.py index 3cd1b1a76d7..eee0bba41d4 100644 --- a/libs/partners/perplexity/langchain_perplexity/chat_models.py +++ b/libs/partners/perplexity/langchain_perplexity/chat_models.py @@ -28,8 +28,11 @@ from langchain_core.messages import ( SystemMessageChunk, ToolMessageChunk, ) -from langchain_core.messages.ai import UsageMetadata, subtract_usage -from langchain_core.output_parsers import JsonOutputParser, PydanticOutputParser +from langchain_core.messages.ai import ( + OutputTokenDetails, + UsageMetadata, + subtract_usage, +) from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult from langchain_core.runnables import Runnable, RunnableMap, RunnablePassthrough from langchain_core.utils import get_pydantic_field_names, secret_from_env @@ -38,6 +41,11 @@ from langchain_core.utils.pydantic import is_basemodel_subclass from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator from typing_extensions import Self +from langchain_perplexity.output_parsers import ( + ReasoningJsonOutputParser, + ReasoningStructuredOutputParser, +) + _DictOrPydanticClass: TypeAlias = dict[str, Any] | type[BaseModel] _DictOrPydantic: TypeAlias = dict | BaseModel @@ -49,13 +57,28 @@ def _is_pydantic_class(obj: Any) -> bool: def _create_usage_metadata(token_usage: dict) -> UsageMetadata: + """Create UsageMetadata from Perplexity token usage data. + + Args: + token_usage: Dictionary containing token usage information from Perplexity API. + + Returns: + UsageMetadata with properly structured token counts and details. + """ input_tokens = token_usage.get("prompt_tokens", 0) output_tokens = token_usage.get("completion_tokens", 0) total_tokens = token_usage.get("total_tokens", input_tokens + output_tokens) + + # Build output_token_details for Perplexity-specific fields + output_token_details: OutputTokenDetails = {} + output_token_details["reasoning"] = token_usage.get("reasoning_tokens", 0) + output_token_details["citation_tokens"] = token_usage.get("citation_tokens", 0) # type: ignore[typeddict-unknown-key] + return UsageMetadata( input_tokens=input_tokens, output_tokens=output_tokens, total_tokens=total_tokens, + output_token_details=output_token_details, ) @@ -72,21 +95,21 @@ class ChatPerplexity(BaseChatModel): ``` Key init args - completion params: - model: str + model: Name of the model to use. e.g. "sonar" - temperature: float - Sampling temperature to use. Default is 0.7 - max_tokens: int | None + temperature: + Sampling temperature to use. + max_tokens: Maximum number of tokens to generate. - streaming: bool + streaming: Whether to stream the results or not. 
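As an illustrative aside (not part of the patch itself), the reworked `_create_usage_metadata` helper earlier in this file can be pictured with a minimal standalone sketch. The sample `token_usage` payload below is invented, and the extra `citation_tokens` key is assumed to need the same type-ignore that the patch uses:

```python
# Sketch of how the updated helper maps a Perplexity usage payload onto
# UsageMetadata, including the new output_token_details. Sample values
# are illustrative only.
from langchain_core.messages.ai import OutputTokenDetails, UsageMetadata

token_usage = {
    "prompt_tokens": 12,
    "completion_tokens": 87,
    "total_tokens": 99,
    "reasoning_tokens": 40,
    "citation_tokens": 5,
}

# Perplexity-specific counters go into output_token_details; "reasoning"
# is a standard key, "citation_tokens" is provider-specific.
details: OutputTokenDetails = {
    "reasoning": token_usage.get("reasoning_tokens", 0),
    "citation_tokens": token_usage.get("citation_tokens", 0),  # type: ignore[typeddict-unknown-key]
}

usage = UsageMetadata(
    input_tokens=token_usage.get("prompt_tokens", 0),
    output_tokens=token_usage.get("completion_tokens", 0),
    total_tokens=token_usage.get("total_tokens", 0),
    output_token_details=details,
)
print(usage)
```

Keeping the provider-specific counters inside `output_token_details` means downstream callers can keep reading the standard `input_tokens`/`output_tokens`/`total_tokens` fields unchanged.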
Key init args - client params: - pplx_api_key: str | None - API key for PerplexityChat API. Default is None. - request_timeout: float | Tuple[float, float] | None - Timeout for requests to PerplexityChat completion API. Default is None. - max_retries: int + pplx_api_key: + API key for PerplexityChat API. + request_timeout: + Timeout for requests to PerplexityChat completion API. + max_retries: Maximum number of retries to make when generating. See full list of supported init args and their descriptions in the params section. @@ -96,14 +119,14 @@ class ChatPerplexity(BaseChatModel): ```python from langchain_perplexity import ChatPerplexity - llm = ChatPerplexity(model="sonar", temperature=0.7) + model = ChatPerplexity(model="sonar", temperature=0.7) ``` Invoke: ```python messages = [("system", "You are a chatbot."), ("user", "Hello!")] - llm.invoke(messages) + model.invoke(messages) ``` Invoke with structured output: @@ -117,53 +140,61 @@ class ChatPerplexity(BaseChatModel): content: str - llm.with_structured_output(StructuredOutput) - llm.invoke(messages) + model.with_structured_output(StructuredOutput) + model.invoke(messages) ``` Invoke with perplexity-specific params: ```python - llm.invoke(messages, extra_body={"search_recency_filter": "week"}) + model.invoke(messages, extra_body={"search_recency_filter": "week"}) ``` Stream: ```python - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.content) ``` Token usage: ```python - response = llm.invoke(messages) + response = model.invoke(messages) response.usage_metadata ``` Response metadata: ```python - response = llm.invoke(messages) + response = model.invoke(messages) response.response_metadata ``` """ # noqa: E501 - client: Any = None #: :meta private: + client: Any = None + model: str = "sonar" """Model name.""" + temperature: float = 0.7 """What sampling temperature to use.""" + model_kwargs: dict[str, Any] = Field(default_factory=dict) """Holds any model parameters valid for `create` call not explicitly specified.""" + pplx_api_key: SecretStr | None = Field( default_factory=secret_from_env("PPLX_API_KEY", default=None), alias="api_key" ) """Base URL path for API requests, leave blank if not using a proxy or service emulator.""" + request_timeout: float | tuple[float, float] | None = Field(None, alias="timeout") - """Timeout for requests to PerplexityChat completion API. 
Default is None.""" + """Timeout for requests to PerplexityChat completion API.""" + max_retries: int = 6 """Maximum number of retries to make when generating.""" + streaming: bool = False """Whether to stream the results or not.""" + max_tokens: int | None = None """Maximum number of tokens to generate.""" @@ -301,6 +332,7 @@ class ChatPerplexity(BaseChatModel): prev_total_usage: UsageMetadata | None = None added_model_name: bool = False + added_search_queries: bool = False for chunk in stream_resp: if not isinstance(chunk, dict): chunk = chunk.model_dump() @@ -332,6 +364,13 @@ class ChatPerplexity(BaseChatModel): generation_info["model_name"] = model_name added_model_name = True + # Add num_search_queries to generation_info if present + if total_usage := chunk.get("usage"): + if num_search_queries := total_usage.get("num_search_queries"): + if not added_search_queries: + generation_info["num_search_queries"] = num_search_queries + added_search_queries = True + chunk = self._convert_delta_to_message_chunk( choice["delta"], default_chunk_class ) @@ -369,20 +408,29 @@ class ChatPerplexity(BaseChatModel): params = {**params, **kwargs} response = self.client.chat.completions.create(messages=message_dicts, **params) if usage := getattr(response, "usage", None): - usage_metadata = _create_usage_metadata(usage.model_dump()) + usage_dict = usage.model_dump() + usage_metadata = _create_usage_metadata(usage_dict) else: usage_metadata = None + usage_dict = {} additional_kwargs = {} for attr in ["citations", "images", "related_questions", "search_results"]: if hasattr(response, attr): additional_kwargs[attr] = getattr(response, attr) + # Build response_metadata with model_name and num_search_queries + response_metadata: dict[str, Any] = { + "model_name": getattr(response, "model", self.model) + } + if num_search_queries := usage_dict.get("num_search_queries"): + response_metadata["num_search_queries"] = num_search_queries + message = AIMessage( content=response.choices[0].message.content, additional_kwargs=additional_kwargs, usage_metadata=usage_metadata, - response_metadata={"model_name": getattr(response, "model", self.model)}, + response_metadata=response_metadata, ) return ChatResult(generations=[ChatGeneration(message=message)]) @@ -423,13 +471,18 @@ class ChatPerplexity(BaseChatModel): include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. strict: Unsupported: whether to enable strict schema adherence when generating the output. This parameter is included for compatibility with other @@ -438,17 +491,18 @@ class ChatPerplexity(BaseChatModel): kwargs: Additional keyword args aren't supported. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. 
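An illustrative sketch (not part of the patch) of the behaviour the rewritten `with_structured_output` docstring describes; it assumes the package is installed, a `PPLX_API_KEY` is set, and uses a made-up `Answer` schema:

```python
from pydantic import BaseModel, Field

from langchain_perplexity import ChatPerplexity


class Answer(BaseModel):
    """Hypothetical schema used only for this example."""

    answer: str = Field(description="Short answer to the question")


model = ChatPerplexity(model="sonar")

# include_raw=True returns a dict instead of raising on parse failures
structured = model.with_structured_output(Answer, include_raw=True)
result = structured.invoke("What is the capital of France?")

result["parsed"]         # Answer instance, or None if parsing failed
result["parsing_error"]  # None, or the exception that was caught
result["raw"]            # the underlying AIMessage
```

With `include_raw=False` (the default) the same call returns the parsed `Answer` directly and re-raises any parsing error.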
+            A `Runnable` that takes same inputs as a
+            `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is
+            `False` and `schema` is a Pydantic class, `Runnable` outputs an instance
+            of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is
+            `False` then `Runnable` outputs a `dict`.
-            If `include_raw` is False and `schema` is a Pydantic class, Runnable outputs
-            an instance of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is False then Runnable outputs a dict.
-
-            If `include_raw` is True, then Runnable outputs a dict with keys:
-
-            - `'raw'`: BaseMessage
-            - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above.
-            - `'parsing_error'`: BaseException | None
+            If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys:
+            - `'raw'`: `BaseMessage`
+            - `'parsed'`: `None` if there was a parsing error, otherwise the type
+              depends on the `schema` as described above.
+            - `'parsing_error'`: `BaseException | None`
        """  # noqa: E501
        if method in ("function_calling", "json_mode"):
            method = "json_schema"
@@ -471,9 +525,9 @@ class ChatPerplexity(BaseChatModel):
                },
            )
            output_parser = (
-                PydanticOutputParser(pydantic_object=schema)  # type: ignore[arg-type]
+                ReasoningStructuredOutputParser(pydantic_object=schema)  # type: ignore[arg-type]
                if is_pydantic_schema
-                else JsonOutputParser()
+                else ReasoningJsonOutputParser()
            )
        else:
            raise ValueError(
diff --git a/libs/partners/perplexity/langchain_perplexity/output_parsers.py b/libs/partners/perplexity/langchain_perplexity/output_parsers.py
new file mode 100644
index 00000000000..917a8dc6e08
--- /dev/null
+++ b/libs/partners/perplexity/langchain_perplexity/output_parsers.py
@@ -0,0 +1,88 @@
+import re
+from typing import Any, Generic
+
+from langchain_core.output_parsers import JsonOutputParser, PydanticOutputParser
+from langchain_core.outputs import Generation
+from langchain_core.utils.pydantic import TBaseModel
+
+
+def strip_think_tags(text: str) -> str:
+    """Removes all <think>...</think> tags and their content from text.
+
+    This function removes all occurrences of think tags, preserving text
+    before, between, and after the tags. It also handles markdown code fences.
+
+    Args:
+        text: The input text that may contain think tags.
+
+    Returns:
+        The text with all `<think>...</think>` blocks removed.
+    """
+    # Remove all <think>...</think> blocks using regex
+    # The pattern matches <think> followed by any content (non-greedy) until </think>
+    result = re.sub(r"<think>.*?</think>", "", text, flags=re.DOTALL)
+
+    # Remove markdown code fence markers if present
+    result = result.strip()
+    if result.startswith("```json"):
+        result = result[len("```json") :].strip()
+    elif result.startswith("```"):
+        result = result[3:].strip()
+
+    if result.endswith("```"):
+        result = result[:-3].strip()
+
+    return result
+
+
+class ReasoningJsonOutputParser(JsonOutputParser):
+    """A JSON output parser that strips reasoning tags before parsing.
+
+    This parser removes any content enclosed in <think> tags from the input text
+    before delegating to the parent JsonOutputParser for JSON parsing.
+
+    """
+
+    def parse_result(self, result: list[Generation], *, partial: bool = False) -> Any:
+        """Parse the result of an LLM call to a JSON object.
+
+        Args:
+            result: The result of the LLM call.
+            partial: Whether to parse partial JSON objects.
+                If `True`, the output will be a JSON object containing
+                all the keys that have been returned so far.
+                If `False`, the output will be the full JSON object.
+
+        Returns:
+            The parsed JSON object.
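A small usage sketch (likewise not part of the patch) of the new helpers, showing that the `<think>` block and a surrounding markdown fence are both stripped before the JSON is parsed:

```python
from langchain_core.outputs import Generation

from langchain_perplexity.output_parsers import (
    ReasoningJsonOutputParser,
    strip_think_tags,
)

raw = '<think>plan the answer first</think>```json\n{"answer": 42}\n```'

# The <think> block and the ```json fence are removed before parsing
assert strip_think_tags(raw) == '{"answer": 42}'

parser = ReasoningJsonOutputParser()
assert parser.parse_result([Generation(text=raw)]) == {"answer": 42}
```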
+
+        Raises:
+            OutputParserException: If the output is not valid JSON.
+        """
+        text = result[0].text
+        text = strip_think_tags(text)
+        return super().parse_result([Generation(text=text)], partial=partial)
+
+
+class ReasoningStructuredOutputParser(
+    PydanticOutputParser[TBaseModel], Generic[TBaseModel]
+):
+    """A structured output parser that strips reasoning tags before parsing.
+
+    This parser removes any content enclosed in <think> tags from the input text
+    before delegating to the parent PydanticOutputParser for structured parsing.
+    """
+
+    def parse_result(self, result: list[Generation], *, partial: bool = False) -> Any:
+        """Parse the result of an LLM call to a Pydantic object.
+
+        Args:
+            result: The result of the LLM call.
+            partial: Whether to parse partial JSON objects.
+                If `True`, the output will be a JSON object containing
+                all the keys that have been returned so far.
+                If `False`, the output will be the full JSON object.
+        """
+        text = result[0].text
+        text = strip_think_tags(text)
+        return super().parse_result([Generation(text=text)], partial=partial)
diff --git a/libs/partners/perplexity/pyproject.toml b/libs/partners/perplexity/pyproject.toml
index f64597fcc14..5d082d456af 100644
--- a/libs/partners/perplexity/pyproject.toml
+++ b/libs/partners/perplexity/pyproject.toml
@@ -3,26 +3,27 @@ requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
-authors = []
+name = "langchain-perplexity"
+description = "An integration package connecting Perplexity and LangChain"
license = { text = "MIT" }
+readme = "README.md"
+authors = []
+
+version = "1.0.0"
requires-python = ">=3.10.0,<4.0.0"
dependencies = [
-    "langchain-core>=1.0.0a7,<2.0.0",
+    "langchain-core>=1.0.0,<2.0.0",
    "openai>=2.0.0,<3.0.0",
]
-name = "langchain-perplexity"
-version = "1.0.0a1"
-description = "An integration package connecting Perplexity and LangChain"
-readme = "README.md"

[project.urls]
-homepage = "https://docs.langchain.com/oss/python/integrations/providers/perplexity"
-repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/perplexity"
-changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-perplexity%22"
-docs = "https://reference.langchain.com/python/integrations/langchain_perplexity/"
-twitter = "https://x.com/LangChainAI"
-slack = "https://www.langchain.com/join-community"
-reddit = "https://www.reddit.com/r/LangChain/"
+Homepage = "https://docs.langchain.com/oss/python/integrations/providers/perplexity"
+Documentation = "https://reference.langchain.com/python/integrations/langchain_perplexity/"
+Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/perplexity"
+Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-perplexity%22"
+Twitter = "https://x.com/LangChainAI"
+Slack = "https://www.langchain.com/join-community"
+Reddit = "https://www.reddit.com/r/LangChain/"

[dependency-groups]
test = [
@@ -43,7 +44,7 @@ lint = ["ruff>=0.13.1,<0.14.0"]
dev = ["langchain-core"]
test_integration = [
    "httpx>=0.27.0,<1.0.0",
-    "pillow>=10.3.0,<11.0.0",
+    "pillow>=10.3.0,<12.0.0",
]
typing = [
    "mypy>=1.10.0,<2.0.0",
diff --git a/libs/partners/perplexity/tests/unit_tests/test_chat_models.py b/libs/partners/perplexity/tests/unit_tests/test_chat_models.py
index a05f80d6c3f..4fc7f08611b 100644
--- a/libs/partners/perplexity/tests/unit_tests/test_chat_models.py
+++ b/libs/partners/perplexity/tests/unit_tests/test_chat_models.py
@@ -5,6 +5,7 @@ from langchain_core.messages import AIMessageChunk, BaseMessage
from
pytest_mock import MockerFixture from langchain_perplexity import ChatPerplexity +from langchain_perplexity.chat_models import _create_usage_metadata def test_perplexity_model_name_param() -> None: @@ -295,3 +296,230 @@ def test_perplexity_stream_includes_citations_and_search_results( } patcher.assert_called_once() + + +def test_create_usage_metadata_basic() -> None: + """Test _create_usage_metadata with basic token counts.""" + token_usage = { + "prompt_tokens": 10, + "completion_tokens": 20, + "total_tokens": 30, + } + + usage_metadata = _create_usage_metadata(token_usage) + + assert usage_metadata["input_tokens"] == 10 + assert usage_metadata["output_tokens"] == 20 + assert usage_metadata["total_tokens"] == 30 + assert usage_metadata["output_token_details"]["reasoning"] == 0 + assert usage_metadata["output_token_details"]["citation_tokens"] == 0 # type: ignore[typeddict-item] + + +def test_create_usage_metadata_with_reasoning_tokens() -> None: + """Test _create_usage_metadata with reasoning tokens.""" + token_usage = { + "prompt_tokens": 50, + "completion_tokens": 100, + "total_tokens": 150, + "reasoning_tokens": 25, + } + + usage_metadata = _create_usage_metadata(token_usage) + + assert usage_metadata["input_tokens"] == 50 + assert usage_metadata["output_tokens"] == 100 + assert usage_metadata["total_tokens"] == 150 + assert usage_metadata["output_token_details"]["reasoning"] == 25 + assert usage_metadata["output_token_details"]["citation_tokens"] == 0 # type: ignore[typeddict-item] + + +def test_create_usage_metadata_with_citation_tokens() -> None: + """Test _create_usage_metadata with citation tokens.""" + token_usage = { + "prompt_tokens": 100, + "completion_tokens": 200, + "total_tokens": 300, + "citation_tokens": 15, + } + + usage_metadata = _create_usage_metadata(token_usage) + + assert usage_metadata["input_tokens"] == 100 + assert usage_metadata["output_tokens"] == 200 + assert usage_metadata["total_tokens"] == 300 + assert usage_metadata["output_token_details"]["reasoning"] == 0 + assert usage_metadata["output_token_details"]["citation_tokens"] == 15 # type: ignore[typeddict-item] + + +def test_create_usage_metadata_with_all_token_types() -> None: + """Test _create_usage_metadata with all token types. + + Tests reasoning tokens and citation tokens together. 
+ """ + token_usage = { + "prompt_tokens": 75, + "completion_tokens": 150, + "total_tokens": 225, + "reasoning_tokens": 30, + "citation_tokens": 20, + } + + usage_metadata = _create_usage_metadata(token_usage) + + assert usage_metadata["input_tokens"] == 75 + assert usage_metadata["output_tokens"] == 150 + assert usage_metadata["total_tokens"] == 225 + assert usage_metadata["output_token_details"]["reasoning"] == 30 + assert usage_metadata["output_token_details"]["citation_tokens"] == 20 # type: ignore[typeddict-item] + + +def test_create_usage_metadata_missing_optional_fields() -> None: + """Test _create_usage_metadata with missing optional fields defaults to 0.""" + token_usage = { + "prompt_tokens": 25, + "completion_tokens": 50, + } + + usage_metadata = _create_usage_metadata(token_usage) + + assert usage_metadata["input_tokens"] == 25 + assert usage_metadata["output_tokens"] == 50 + # Total tokens should be calculated if not provided + assert usage_metadata["total_tokens"] == 75 + assert usage_metadata["output_token_details"]["reasoning"] == 0 + assert usage_metadata["output_token_details"]["citation_tokens"] == 0 # type: ignore[typeddict-item] + + +def test_create_usage_metadata_empty_dict() -> None: + """Test _create_usage_metadata with empty token usage dict.""" + token_usage: dict = {} + + usage_metadata = _create_usage_metadata(token_usage) + + assert usage_metadata["input_tokens"] == 0 + assert usage_metadata["output_tokens"] == 0 + assert usage_metadata["total_tokens"] == 0 + assert usage_metadata["output_token_details"]["reasoning"] == 0 + assert usage_metadata["output_token_details"]["citation_tokens"] == 0 # type: ignore[typeddict-item] + + +def test_perplexity_invoke_includes_num_search_queries(mocker: MockerFixture) -> None: + """Test that invoke includes num_search_queries in response_metadata.""" + llm = ChatPerplexity(model="test", timeout=30, verbose=True) + + mock_usage = MagicMock() + mock_usage.model_dump.return_value = { + "prompt_tokens": 10, + "completion_tokens": 20, + "total_tokens": 30, + "num_search_queries": 3, + } + + mock_response = MagicMock() + mock_response.choices = [ + MagicMock( + message=MagicMock( + content="Test response", + tool_calls=None, + ), + finish_reason="stop", + ) + ] + mock_response.model = "test-model" + mock_response.usage = mock_usage + + patcher = mocker.patch.object( + llm.client.chat.completions, "create", return_value=mock_response + ) + + result = llm.invoke("Test query") + + assert result.response_metadata["num_search_queries"] == 3 + assert result.response_metadata["model_name"] == "test-model" + patcher.assert_called_once() + + +def test_perplexity_invoke_without_num_search_queries(mocker: MockerFixture) -> None: + """Test that invoke works when num_search_queries is not provided.""" + llm = ChatPerplexity(model="test", timeout=30, verbose=True) + + mock_usage = MagicMock() + mock_usage.model_dump.return_value = { + "prompt_tokens": 10, + "completion_tokens": 20, + "total_tokens": 30, + } + + mock_response = MagicMock() + mock_response.choices = [ + MagicMock( + message=MagicMock( + content="Test response", + tool_calls=None, + ), + finish_reason="stop", + ) + ] + mock_response.model = "test-model" + mock_response.usage = mock_usage + + patcher = mocker.patch.object( + llm.client.chat.completions, "create", return_value=mock_response + ) + + result = llm.invoke("Test query") + + assert "num_search_queries" not in result.response_metadata + assert result.response_metadata["model_name"] == "test-model" + 
patcher.assert_called_once() + + +def test_perplexity_stream_includes_num_search_queries(mocker: MockerFixture) -> None: + """Test that stream properly handles num_search_queries in usage data.""" + llm = ChatPerplexity(model="test", timeout=30, verbose=True) + + mock_chunk_0 = { + "choices": [{"delta": {"content": "Hello "}, "finish_reason": None}], + } + mock_chunk_1 = { + "choices": [{"delta": {"content": "world"}, "finish_reason": None}], + } + mock_chunk_2 = { + "choices": [{"delta": {}, "finish_reason": "stop"}], + "usage": { + "prompt_tokens": 5, + "completion_tokens": 10, + "total_tokens": 15, + "num_search_queries": 2, + "reasoning_tokens": 1, + "citation_tokens": 3, + }, + } + mock_chunks: list[dict[str, Any]] = [mock_chunk_0, mock_chunk_1, mock_chunk_2] + mock_stream = MagicMock() + mock_stream.__iter__.return_value = mock_chunks + + patcher = mocker.patch.object( + llm.client.chat.completions, "create", return_value=mock_stream + ) + + chunks_list = list(llm.stream("Test query")) + + # Find the chunk with usage metadata + usage_chunk = None + for chunk in chunks_list: + if chunk.usage_metadata: + usage_chunk = chunk + break + + # Verify usage metadata is properly set + assert usage_chunk is not None + assert usage_chunk.usage_metadata is not None + assert usage_chunk.usage_metadata["input_tokens"] == 5 + assert usage_chunk.usage_metadata["output_tokens"] == 10 + assert usage_chunk.usage_metadata["total_tokens"] == 15 + # Verify reasoning and citation tokens are included + assert usage_chunk.usage_metadata["output_token_details"]["reasoning"] == 1 + assert usage_chunk.usage_metadata["output_token_details"]["citation_tokens"] == 3 # type: ignore[typeddict-item] + + patcher.assert_called_once() diff --git a/libs/partners/perplexity/tests/unit_tests/test_output_parsers.py b/libs/partners/perplexity/tests/unit_tests/test_output_parsers.py new file mode 100644 index 00000000000..369e028f821 --- /dev/null +++ b/libs/partners/perplexity/tests/unit_tests/test_output_parsers.py @@ -0,0 +1,356 @@ +"""Unit tests for output parsers.""" + +import pytest +from langchain_core.exceptions import OutputParserException +from langchain_core.outputs import Generation +from pydantic import BaseModel, Field + +from langchain_perplexity.output_parsers import ( + ReasoningJsonOutputParser, + ReasoningStructuredOutputParser, + strip_think_tags, +) + + +class TestStripThinkTags: + """Tests for the strip_think_tags function.""" + + def test_strip_simple_think_tags(self) -> None: + """Test stripping simple think tags.""" + text = "Hello some reasoning world" + result = strip_think_tags(text) + assert result == "Hello world" + + def test_strip_multiple_think_tags(self) -> None: + """Test stripping multiple think tags.""" + text = "first Hello second world\ + third" + result = strip_think_tags(text) + assert result == "Hello world" + + def test_strip_nested_like_think_tags(self) -> None: + """Test stripping think tags that might appear nested.""" + text = "outer inner still outer result" + result = strip_think_tags(text) + # The function removes from first to first + # then continues from after that + assert result == "still outer result" + + def test_strip_think_tags_no_closing_tag(self) -> None: + """Test handling of think tags without closing tag.""" + text = "Hello unclosed reasoning world" + result = strip_think_tags(text) + # Treats unclosed tag as literal text + assert result == "Hello unclosed reasoning world" + + def test_strip_think_tags_empty_content(self) -> None: + """Test stripping empty think 
tags.""" + text = "Hello world" + result = strip_think_tags(text) + assert result == "Hello world" + + def test_strip_think_tags_no_tags(self) -> None: + """Test text without any think tags.""" + text = "Hello world" + result = strip_think_tags(text) + assert result == "Hello world" + + def test_strip_think_tags_only_tags(self) -> None: + """Test text containing only think tags.""" + text = "reasoning" + result = strip_think_tags(text) + assert result == "" + + def test_strip_think_tags_multiline(self) -> None: + """Test stripping think tags across multiple lines.""" + text = """Hello + +reasoning line 1 +reasoning line 2 + +world""" + result = strip_think_tags(text) + assert result == "Hello\n\nworld" + + def test_strip_think_tags_with_special_chars(self) -> None: + """Test think tags containing special characters.""" + text = 'Before {"key": "value"} After' + result = strip_think_tags(text) + assert result == "Before After" + + +class TestReasoningJsonOutputParser: + """Tests for ReasoningJsonOutputParser.""" + + def test_parse_json_without_think_tags(self) -> None: + """Test parsing JSON without think tags.""" + parser = ReasoningJsonOutputParser() + text = '{"name": "John", "age": 30}' + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert result == {"name": "John", "age": 30} + + def test_parse_json_with_think_tags(self) -> None: + """Test parsing JSON with think tags.""" + parser = ReasoningJsonOutputParser() + text = 'Let me construct the JSON{"name": "John", "age": 30}' + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert result == {"name": "John", "age": 30} + + def test_parse_json_with_multiple_think_tags(self) -> None: + """Test parsing JSON with multiple think tags.""" + parser = ReasoningJsonOutputParser() + text = 'Step 1{"name": thinking"John", "age": 30}' + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert result == {"name": "John", "age": 30} + + def test_parse_markdown_json_with_think_tags(self) -> None: + """Test parsing markdown-wrapped JSON with think tags.""" + parser = ReasoningJsonOutputParser() + text = """Building response +```json +{"name": "John", "age": 30} +```""" + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert result == {"name": "John", "age": 30} + + def test_parse_complex_json_with_think_tags(self) -> None: + """Test parsing complex nested JSON with think tags.""" + parser = ReasoningJsonOutputParser() + text = """Creating nested structure +{ + "user": { + "name": "John", + "address": { + "city": "NYC", + "zip": "10001" + } + }, + "items": [1, 2, 3] +}""" + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert result == { + "user": {"name": "John", "address": {"city": "NYC", "zip": "10001"}}, + "items": [1, 2, 3], + } + + def test_parse_invalid_json_with_think_tags(self) -> None: + """Test that invalid JSON raises an exception even with think tags.""" + parser = ReasoningJsonOutputParser() + text = "This will fail{invalid json}" + generation = Generation(text=text) + with pytest.raises(OutputParserException): + parser.parse_result([generation]) + + def test_parse_empty_string_after_stripping(self) -> None: + """Test parsing when only think tags remain.""" + parser = ReasoningJsonOutputParser() + text = "Only reasoning, no output" + generation = Generation(text=text) + with pytest.raises(OutputParserException): + parser.parse_result([generation]) + + def 
test_parse_json_array_with_think_tags(self) -> None: + """Test parsing JSON array with think tags.""" + parser = ReasoningJsonOutputParser() + text = 'Creating array[{"id": 1}, {"id": 2}]' + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert result == [{"id": 1}, {"id": 2}] + + def test_partial_json_parsing_with_think_tags(self) -> None: + """Test partial JSON parsing with think tags.""" + parser = ReasoningJsonOutputParser() + text = 'Starting{"name": "John", "age":' + generation = Generation(text=text) + # Partial parsing should handle incomplete JSON + result = parser.parse_result([generation], partial=True) + assert result == {"name": "John"} + + +class MockPerson(BaseModel): + """Mock Pydantic model for testing.""" + + name: str = Field(description="The person's name") + age: int = Field(description="The person's age") + email: str | None = Field(default=None, description="The person's email") + + +class MockCompany(BaseModel): + """Mock nested Pydantic model for testing.""" + + company_name: str = Field(description="Company name") + employees: list[MockPerson] = Field(description="List of employees") + founded_year: int = Field(description="Year founded") + + +class TestReasoningStructuredOutputParser: + """Tests for ReasoningStructuredOutputParser.""" + + def test_parse_structured_output_without_think_tags(self) -> None: + """Test parsing structured output without think tags.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + text = '{"name": "John Doe", "age": 30, "email": "john@example.com"}' + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert isinstance(result, MockPerson) + assert result.name == "John Doe" + assert result.age == 30 + assert result.email == "john@example.com" + + def test_parse_structured_output_with_think_tags(self) -> None: + """Test parsing structured output with think tags.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + text = 'Let me create a person\ + object{"name": "John Doe", "age": 30}' + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert isinstance(result, MockPerson) + assert result.name == "John Doe" + assert result.age == 30 + assert result.email is None + + def test_parse_structured_output_with_multiple_think_tags(self) -> None: + """Test parsing with multiple think tags.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + text = """Step 1: Determine name +Step 2: Determine age +{"name": "Jane Smith", "age": 25}""" + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert isinstance(result, MockPerson) + assert result.name == "Jane Smith" + assert result.age == 25 + + def test_parse_structured_output_markdown_with_think_tags(self) -> None: + """Test parsing markdown-wrapped structured output with think tags.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + text = """Building person object +```json +{"name": "Alice Brown", "age": 35, "email": "alice@example.com"} +```""" + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert isinstance(result, MockPerson) + assert result.name == "Alice Brown" + assert result.age == 35 + assert result.email == 
"alice@example.com" + + def test_parse_nested_structured_output_with_think_tags(self) -> None: + """Test parsing nested Pydantic models with think tags.""" + parser: ReasoningStructuredOutputParser[MockCompany] = ( + ReasoningStructuredOutputParser(pydantic_object=MockCompany) + ) + text = """Creating company with employees +{ + "company_name": "Tech Corp", + "founded_year": 2020, + "employees": [ + {"name": "John", "age": 30}, + {"name": "Jane", "age": 28} + ] +}""" + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert isinstance(result, MockCompany) + assert result.company_name == "Tech Corp" + assert result.founded_year == 2020 + assert len(result.employees) == 2 + assert result.employees[0].name == "John" + assert result.employees[1].name == "Jane" + + def test_parse_invalid_structured_output_with_think_tags(self) -> None: + """Test that invalid structured output raises exception.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + # Missing required field 'age' + text = 'Creating person{"name": "John"}' + generation = Generation(text=text) + with pytest.raises(OutputParserException): + parser.parse_result([generation]) + + def test_parse_structured_wrong_type_with_think_tags(self) -> None: + """Test that wrong types raise validation errors.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + # Age should be int, not string + text = 'Creating person{"name": "John", "age": "thirty"}' + generation = Generation(text=text) + with pytest.raises(OutputParserException): + parser.parse_result([generation]) + + def test_parse_empty_after_stripping_think_tags(self) -> None: + """Test handling when only think tags remain.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + text = "Only reasoning here" + generation = Generation(text=text) + with pytest.raises(OutputParserException): + parser.parse_result([generation]) + + def test_get_format_instructions(self) -> None: + """Test that format instructions work correctly.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + instructions = parser.get_format_instructions() + assert "MockPerson" in instructions or "name" in instructions + assert isinstance(instructions, str) + + def test_partial_structured_parsing_with_think_tags(self) -> None: + """Test partial parsing of structured output with think tags.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + text = 'Starting{"name": "John", "age": 30' + generation = Generation(text=text) + # Partial parsing should handle incomplete JSON + result = parser.parse_result([generation], partial=True) + # With partial=True, it should return what it can parse + assert "name" in result or isinstance(result, MockPerson) + + def test_parser_with_think_tags_in_json_values(self) -> None: + """Test that think tags in JSON string values don't cause issues.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + # Think tags should be stripped before JSON parsing, so they won't be in values + text = 'reasoning{"name": "John ", "age": 30}' + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert 
isinstance(result, MockPerson) + assert result.name == "John " + assert result.age == 30 + + def test_multiline_think_tags_with_structured_output(self) -> None: + """Test parsing structured output with multiline think tags.""" + parser: ReasoningStructuredOutputParser[MockPerson] = ( + ReasoningStructuredOutputParser(pydantic_object=MockPerson) + ) + text = """ +Step 1: Consider the requirements +Step 2: Structure the data +Step 3: Format as JSON + +{"name": "Bob Wilson", "age": 40, "email": "bob@example.com"}""" + generation = Generation(text=text) + result = parser.parse_result([generation]) + assert isinstance(result, MockPerson) + assert result.name == "Bob Wilson" + assert result.age == 40 + assert result.email == "bob@example.com" diff --git a/libs/partners/perplexity/uv.lock b/libs/partners/perplexity/uv.lock index 2f7ab07dfd4..2bc8de6b9c9 100644 --- a/libs/partners/perplexity/uv.lock +++ b/libs/partners/perplexity/uv.lock @@ -316,7 +316,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ @@ -495,7 +495,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a6" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -553,7 +553,7 @@ typing = [ [[package]] name = "langchain-perplexity" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." 
} dependencies = [ { name = "langchain-core" }, @@ -616,7 +616,7 @@ test = [ ] test-integration = [ { name = "httpx", specifier = ">=0.27.0,<1.0.0" }, - { name = "pillow", specifier = ">=10.3.0,<11.0.0" }, + { name = "pillow", specifier = ">=10.3.0,<12.0.0" }, ] typing = [ { name = "langchain-core", editable = "../../core" }, @@ -626,7 +626,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1133,61 +1133,104 @@ wheels = [ [[package]] name = "pillow" -version = "10.4.0" +version = "11.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059, upload-time = "2024-07-01T09:48:43.583Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/69/a31cccd538ca0b5272be2a38347f8839b97a14be104ea08b0db92f749c74/pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e", size = 3509271, upload-time = "2024-07-01T09:45:22.07Z" }, - { url = "https://files.pythonhosted.org/packages/9a/9e/4143b907be8ea0bce215f2ae4f7480027473f8b61fcedfda9d851082a5d2/pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d", size = 3375658, upload-time = "2024-07-01T09:45:25.292Z" }, - { url = "https://files.pythonhosted.org/packages/8a/25/1fc45761955f9359b1169aa75e241551e74ac01a09f487adaaf4c3472d11/pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856", size = 4332075, upload-time = "2024-07-01T09:45:27.94Z" }, - { url = "https://files.pythonhosted.org/packages/5e/dd/425b95d0151e1d6c951f45051112394f130df3da67363b6bc75dc4c27aba/pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f", size = 4444808, upload-time = "2024-07-01T09:45:30.305Z" }, - { url = "https://files.pythonhosted.org/packages/b1/84/9a15cc5726cbbfe7f9f90bfb11f5d028586595907cd093815ca6644932e3/pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b", size = 4356290, upload-time = "2024-07-01T09:45:32.868Z" }, - { url = "https://files.pythonhosted.org/packages/b5/5b/6651c288b08df3b8c1e2f8c1152201e0b25d240e22ddade0f1e242fc9fa0/pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc", size = 4525163, upload-time = "2024-07-01T09:45:35.279Z" }, - { url = "https://files.pythonhosted.org/packages/07/8b/34854bf11a83c248505c8cb0fcf8d3d0b459a2246c8809b967963b6b12ae/pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e", size = 4463100, upload-time = "2024-07-01T09:45:37.74Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/63/0632aee4e82476d9cbe5200c0cdf9ba41ee04ed77887432845264d81116d/pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46", size = 4592880, upload-time = "2024-07-01T09:45:39.89Z" }, - { url = "https://files.pythonhosted.org/packages/df/56/b8663d7520671b4398b9d97e1ed9f583d4afcbefbda3c6188325e8c297bd/pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984", size = 2235218, upload-time = "2024-07-01T09:45:42.771Z" }, - { url = "https://files.pythonhosted.org/packages/f4/72/0203e94a91ddb4a9d5238434ae6c1ca10e610e8487036132ea9bf806ca2a/pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141", size = 2554487, upload-time = "2024-07-01T09:45:45.176Z" }, - { url = "https://files.pythonhosted.org/packages/bd/52/7e7e93d7a6e4290543f17dc6f7d3af4bd0b3dd9926e2e8a35ac2282bc5f4/pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1", size = 2243219, upload-time = "2024-07-01T09:45:47.274Z" }, - { url = "https://files.pythonhosted.org/packages/a7/62/c9449f9c3043c37f73e7487ec4ef0c03eb9c9afc91a92b977a67b3c0bbc5/pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c", size = 3509265, upload-time = "2024-07-01T09:45:49.812Z" }, - { url = "https://files.pythonhosted.org/packages/f4/5f/491dafc7bbf5a3cc1845dc0430872e8096eb9e2b6f8161509d124594ec2d/pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be", size = 3375655, upload-time = "2024-07-01T09:45:52.462Z" }, - { url = "https://files.pythonhosted.org/packages/73/d5/c4011a76f4207a3c151134cd22a1415741e42fa5ddecec7c0182887deb3d/pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3", size = 4340304, upload-time = "2024-07-01T09:45:55.006Z" }, - { url = "https://files.pythonhosted.org/packages/ac/10/c67e20445a707f7a610699bba4fe050583b688d8cd2d202572b257f46600/pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6", size = 4452804, upload-time = "2024-07-01T09:45:58.437Z" }, - { url = "https://files.pythonhosted.org/packages/a9/83/6523837906d1da2b269dee787e31df3b0acb12e3d08f024965a3e7f64665/pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe", size = 4365126, upload-time = "2024-07-01T09:46:00.713Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e5/8c68ff608a4203085158cff5cc2a3c534ec384536d9438c405ed6370d080/pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319", size = 4533541, upload-time = "2024-07-01T09:46:03.235Z" }, - { url = "https://files.pythonhosted.org/packages/f4/7c/01b8dbdca5bc6785573f4cee96e2358b0918b7b2c7b60d8b6f3abf87a070/pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d", size = 4471616, upload-time = "2024-07-01T09:46:05.356Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/57/2899b82394a35a0fbfd352e290945440e3b3785655a03365c0ca8279f351/pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", size = 4600802, upload-time = "2024-07-01T09:46:08.145Z" }, - { url = "https://files.pythonhosted.org/packages/4d/d7/a44f193d4c26e58ee5d2d9db3d4854b2cfb5b5e08d360a5e03fe987c0086/pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496", size = 2235213, upload-time = "2024-07-01T09:46:10.211Z" }, - { url = "https://files.pythonhosted.org/packages/c1/d0/5866318eec2b801cdb8c82abf190c8343d8a1cd8bf5a0c17444a6f268291/pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91", size = 2554498, upload-time = "2024-07-01T09:46:12.685Z" }, - { url = "https://files.pythonhosted.org/packages/d4/c8/310ac16ac2b97e902d9eb438688de0d961660a87703ad1561fd3dfbd2aa0/pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22", size = 2243219, upload-time = "2024-07-01T09:46:14.83Z" }, - { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350, upload-time = "2024-07-01T09:46:17.177Z" }, - { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980, upload-time = "2024-07-01T09:46:19.169Z" }, - { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799, upload-time = "2024-07-01T09:46:21.883Z" }, - { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973, upload-time = "2024-07-01T09:46:24.321Z" }, - { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054, upload-time = "2024-07-01T09:46:26.825Z" }, - { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484, upload-time = "2024-07-01T09:46:29.355Z" }, - { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375, upload-time = "2024-07-01T09:46:31.756Z" }, - { url = 
"https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773, upload-time = "2024-07-01T09:46:33.73Z" }, - { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690, upload-time = "2024-07-01T09:46:36.587Z" }, - { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951, upload-time = "2024-07-01T09:46:38.777Z" }, - { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427, upload-time = "2024-07-01T09:46:43.15Z" }, - { url = "https://files.pythonhosted.org/packages/c3/00/706cebe7c2c12a6318aabe5d354836f54adff7156fd9e1bd6c89f4ba0e98/pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", size = 3525685, upload-time = "2024-07-01T09:46:45.194Z" }, - { url = "https://files.pythonhosted.org/packages/cf/76/f658cbfa49405e5ecbfb9ba42d07074ad9792031267e782d409fd8fe7c69/pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", size = 3374883, upload-time = "2024-07-01T09:46:47.331Z" }, - { url = "https://files.pythonhosted.org/packages/46/2b/99c28c4379a85e65378211971c0b430d9c7234b1ec4d59b2668f6299e011/pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", size = 4339837, upload-time = "2024-07-01T09:46:49.647Z" }, - { url = "https://files.pythonhosted.org/packages/f1/74/b1ec314f624c0c43711fdf0d8076f82d9d802afd58f1d62c2a86878e8615/pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", size = 4455562, upload-time = "2024-07-01T09:46:51.811Z" }, - { url = "https://files.pythonhosted.org/packages/4a/2a/4b04157cb7b9c74372fa867096a1607e6fedad93a44deeff553ccd307868/pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", size = 4366761, upload-time = "2024-07-01T09:46:53.961Z" }, - { url = "https://files.pythonhosted.org/packages/ac/7b/8f1d815c1a6a268fe90481232c98dd0e5fa8c75e341a75f060037bd5ceae/pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", size = 4536767, upload-time = "2024-07-01T09:46:56.664Z" }, - { url = "https://files.pythonhosted.org/packages/e5/77/05fa64d1f45d12c22c314e7b97398ffb28ef2813a485465017b7978b3ce7/pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", size = 4477989, upload-time = "2024-07-01T09:46:58.977Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/63/b0397cfc2caae05c3fb2f4ed1b4fc4fc878f0243510a7a6034ca59726494/pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", size = 4610255, upload-time = "2024-07-01T09:47:01.189Z" }, - { url = "https://files.pythonhosted.org/packages/7b/f9/cfaa5082ca9bc4a6de66ffe1c12c2d90bf09c309a5f52b27759a596900e7/pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", size = 2235603, upload-time = "2024-07-01T09:47:03.918Z" }, - { url = "https://files.pythonhosted.org/packages/01/6a/30ff0eef6e0c0e71e55ded56a38d4859bf9d3634a94a88743897b5f96936/pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", size = 2554972, upload-time = "2024-07-01T09:47:06.152Z" }, - { url = "https://files.pythonhosted.org/packages/48/2c/2e0a52890f269435eee38b21c8218e102c621fe8d8df8b9dd06fabf879ba/pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", size = 2243375, upload-time = "2024-07-01T09:47:09.065Z" }, - { url = "https://files.pythonhosted.org/packages/38/30/095d4f55f3a053392f75e2eae45eba3228452783bab3d9a920b951ac495c/pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4", size = 3493889, upload-time = "2024-07-01T09:48:04.815Z" }, - { url = "https://files.pythonhosted.org/packages/f3/e8/4ff79788803a5fcd5dc35efdc9386af153569853767bff74540725b45863/pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da", size = 3346160, upload-time = "2024-07-01T09:48:07.206Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ac/4184edd511b14f760c73f5bb8a5d6fd85c591c8aff7c2229677a355c4179/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026", size = 3435020, upload-time = "2024-07-01T09:48:09.66Z" }, - { url = "https://files.pythonhosted.org/packages/da/21/1749cd09160149c0a246a81d646e05f35041619ce76f6493d6a96e8d1103/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e", size = 3490539, upload-time = "2024-07-01T09:48:12.529Z" }, - { url = "https://files.pythonhosted.org/packages/b6/f5/f71fe1888b96083b3f6dfa0709101f61fc9e972c0c8d04e9d93ccef2a045/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5", size = 3476125, upload-time = "2024-07-01T09:48:14.891Z" }, - { url = "https://files.pythonhosted.org/packages/96/b9/c0362c54290a31866c3526848583a2f45a535aa9d725fd31e25d318c805f/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885", size = 3579373, upload-time = "2024-07-01T09:48:17.601Z" }, - { url = "https://files.pythonhosted.org/packages/52/3b/ce7a01026a7cf46e5452afa86f97a5e88ca97f562cafa76570178ab56d8d/pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", size = 2554661, upload-time = "2024-07-01T09:48:20.293Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860", size = 5316554, upload-time = "2025-07-01T09:13:39.342Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad", size = 4686548, upload-time = "2025-07-01T09:13:41.835Z" }, + { url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0", size = 5859742, upload-time = "2025-07-03T13:09:47.439Z" }, + { url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b", size = 7633087, upload-time = "2025-07-03T13:09:51.796Z" }, + { url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50", size = 5963350, upload-time = "2025-07-01T09:13:43.865Z" }, + { url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae", size = 6631840, upload-time = "2025-07-01T09:13:46.161Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9", size = 6074005, upload-time = "2025-07-01T09:13:47.829Z" }, + { url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e", size = 6708372, upload-time = "2025-07-01T09:13:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6", size = 6277090, upload-time = "2025-07-01T09:13:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f", size = 6985988, upload-time = "2025-07-01T09:13:55.699Z" }, + { url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f", size = 2422899, upload-time = "2025-07-01T09:13:57.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, + { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, + { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, + { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, + { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, + { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516, upload-time = "2025-07-01T09:14:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768, upload-time = "2025-07-01T09:14:11.921Z" }, + { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055, upload-time = "2025-07-01T09:14:13.623Z" }, + { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, + { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, + { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, + { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, + { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, + { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, + { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, + { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, + { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474, upload-time = "2025-07-01T09:14:41.843Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038, upload-time = "2025-07-01T09:14:44.008Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407, upload-time = "2025-07-03T13:10:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094, upload-time = "2025-07-03T13:10:21.857Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503, upload-time = "2025-07-01T09:14:45.698Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574, upload-time = "2025-07-01T09:14:47.415Z" }, + { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060, upload-time = "2025-07-01T09:14:49.636Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407, upload-time = "2025-07-01T09:14:51.962Z" }, + { 
url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841, upload-time = "2025-07-01T09:14:54.142Z" }, + { url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450, upload-time = "2025-07-01T09:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055, upload-time = "2025-07-01T09:14:58.072Z" }, + { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110, upload-time = "2025-07-01T09:14:59.79Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547, upload-time = "2025-07-01T09:15:01.648Z" }, + { url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554, upload-time = "2025-07-03T13:10:27.018Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132, upload-time = "2025-07-03T13:10:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001, upload-time = "2025-07-01T09:15:03.365Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814, upload-time = "2025-07-01T09:15:05.655Z" }, + { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124, upload-time = "2025-07-01T09:15:07.358Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186, upload-time = "2025-07-01T09:15:09.317Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546, upload-time = "2025-07-01T09:15:11.311Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102, upload-time = "2025-07-01T09:15:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803, upload-time = "2025-07-01T09:15:15.695Z" }, + { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520, upload-time = "2025-07-01T09:15:17.429Z" }, + { url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116, upload-time = "2025-07-01T09:15:19.423Z" }, + { url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597, upload-time = "2025-07-03T13:10:38.404Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246, upload-time = "2025-07-03T13:10:44.987Z" }, + { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336, upload-time = "2025-07-01T09:15:21.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699, upload-time = "2025-07-01T09:15:23.186Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789, upload-time = "2025-07-01T09:15:25.1Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386, upload-time = "2025-07-01T09:15:27.378Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911, upload-time = "2025-07-01T09:15:29.294Z" }, + { url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383, upload-time = "2025-07-01T09:15:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385, upload-time = "2025-07-01T09:15:33.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129, upload-time = "2025-07-01T09:15:35.194Z" }, + { url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580, upload-time = "2025-07-01T09:15:37.114Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860, upload-time = "2025-07-03T13:10:50.248Z" }, + { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694, upload-time = "2025-07-03T13:10:56.432Z" }, + { url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888, upload-time = "2025-07-01T09:15:39.436Z" }, + { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330, upload-time = "2025-07-01T09:15:41.269Z" }, + { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089, upload-time = "2025-07-01T09:15:43.13Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206, upload-time = "2025-07-01T09:15:44.937Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370, upload-time = "2025-07-01T09:15:46.673Z" }, + { url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500, upload-time = "2025-07-01T09:15:48.512Z" }, + { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967", size = 5270556, upload-time = "2025-07-01T09:16:09.961Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe", size = 4654625, upload-time = "2025-07-01T09:16:11.913Z" }, + { url = "https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c", size = 4874207, upload-time = "2025-07-03T13:11:10.201Z" }, + { url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25", size = 6583939, upload-time = "2025-07-03T13:11:15.68Z" }, + { url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27", size = 4957166, upload-time = "2025-07-01T09:16:13.74Z" }, + { url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a", size = 5581482, upload-time = "2025-07-01T09:16:16.107Z" }, + { url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f", size = 6984596, upload-time = "2025-07-01T09:16:18.07Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, + { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, + { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, + { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, + { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, ] [[package]] @@ -1308,7 +1351,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1316,96 +1359,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" 
+version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = 
"2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = 
"2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = 
"sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - 
{ url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = 
"2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = 
"2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = "https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = "https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = 
"sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = "sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = "2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, 
upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/partners/prompty/README.md b/libs/partners/prompty/README.md index 4721cc5a28c..62a9f047ad7 100644 --- a/libs/partners/prompty/README.md +++ b/libs/partners/prompty/README.md @@ -1,15 +1,26 @@ # langchain-prompty +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-prompty?label=%20)](https://pypi.org/project/langchain-prompty/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-prompty)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-prompty)](https://pypistats.org/packages/langchain-prompty) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-prompty +``` + +## πŸ€” What is this? + This package contains the LangChain integration with Microsoft Prompty. +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/microsoft) for more details. -## Installation - -```bash -pip install -U langchain-prompty -``` - ## Usage Use the `create_chat_prompt` function to load `prompty` file as prompt. diff --git a/libs/partners/prompty/langchain_prompty/utils.py b/libs/partners/prompty/langchain_prompty/utils.py index 19c4dd837ec..e6d384eec72 100644 --- a/libs/partners/prompty/langchain_prompty/utils.py +++ b/libs/partners/prompty/langchain_prompty/utils.py @@ -225,19 +225,19 @@ def execute( raw: bool = False, connection: str = "default", ) -> Any: - """Execute a prompty. + """Execute a `Prompty`. Args: prompt: The prompt to execute. - Can be a path to a prompty file or a Prompty object. - configuration: The configuration to use. Defaults to `{}`. - parameters: The parameters to use. Defaults to `{}`. - inputs: The inputs to the prompty. Defaults to `{}`. - raw: Whether to return the raw output. Defaults to `False`. - connection: The connection to use. Defaults to `'default'`. + Can be a path to a prompty file or a `Prompty` object. + configuration: The configuration to use. + parameters: The parameters to use. + inputs: The inputs to the `Prompty`. + raw: Whether to return the raw output. + connection: The connection to use. Returns: - The result of executing the prompty. + The result of executing the `Prompty`. 
""" if isinstance(prompt, str): diff --git a/libs/partners/prompty/pyproject.toml b/libs/partners/prompty/pyproject.toml index bf6f10c9265..3a4413f09d3 100644 --- a/libs/partners/prompty/pyproject.toml +++ b/libs/partners/prompty/pyproject.toml @@ -7,22 +7,22 @@ authors = [] license = { text = "MIT" } requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "pyyaml>=6.0.1,<7.0.0" ] name = "langchain-prompty" -version = "1.0.0a1" +version = "1.0.0" description = "An integration package connecting Prompty and LangChain" readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/microsoft" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/prompty" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-prompty%22" -docs = "https://reference.langchain.com/python/integrations/langchain_prompty/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/microsoft" +Documentation = "https://reference.langchain.com/python/integrations/langchain_prompty/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/prompty" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-prompty%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ @@ -48,7 +48,7 @@ dev = [ "langchain-core" ] typing = [ - "mypy>=0.991.0,<1.0.0", + "mypy>=1.18.1,<1.19.0", "types-pyyaml>=6.0.12.20240311,<7.0.0.0", "langchain-core", ] diff --git a/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py b/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py index b648352d065..72a4308f38e 100644 --- a/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py +++ b/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py @@ -112,7 +112,9 @@ def test_prompty_used_in_agent() -> None: input: str chat_history: list[tuple[str, str]] = Field( ..., - extra={"widget": {"type": "chat", "input": "input", "output": "output"}}, + json_schema_extra={ + "widget": {"type": "chat", "input": "input", "output": "output"} + }, ) agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True).with_types( diff --git a/libs/partners/prompty/uv.lock b/libs/partners/prompty/uv.lock index 39d844efcf0..0f81ac96094 100644 --- a/libs/partners/prompty/uv.lock +++ b/libs/partners/prompty/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.10.0, <4.0.0" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -360,7 +360,7 @@ wheels = [ [[package]] name = "langchain-classic" -version = "0.3.27" +version = "1.0.0" source = { editable = "../../langchain" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11'" }, @@ -451,9 +451,10 @@ test-integration = [ { name = "wrapt", specifier = ">=1.15.0,<2.0.0" }, ] typing = [ + { name = "fastapi", specifier = ">=0.116.1,<1.0.0" }, { name = "langchain-core", editable = "../../core" }, { name = "langchain-text-splitters", editable = "../../text-splitters" }, - { name = "mypy", specifier = ">=1.15.0,<1.16.0" }, + { name = "mypy", specifier = ">=1.18.2,<1.19.0" }, { name = 
"mypy-protobuf", specifier = ">=3.0.0,<4.0.0" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, @@ -467,7 +468,7 @@ typing = [ [[package]] name = "langchain-core" -version = "1.0.0a6" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -525,7 +526,7 @@ typing = [ [[package]] name = "langchain-prompty" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." } dependencies = [ { name = "langchain-core" }, @@ -591,13 +592,13 @@ test = [ test-integration = [] typing = [ { name = "langchain-core", editable = "../../core" }, - { name = "mypy", specifier = ">=0.991.0,<1.0.0" }, + { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, { name = "types-pyyaml", specifier = ">=6.0.12.20240311,<7.0.0.0" }, ] [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -642,7 +643,7 @@ typing = [ [[package]] name = "langchain-text-splitters" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "../../text-splitters" } dependencies = [ { name = "langchain-core" }, @@ -675,8 +676,8 @@ test-integration = [ { name = "nltk", specifier = ">=3.9.1,<4.0.0" }, { name = "scipy", marker = "python_full_version == '3.12.*'", specifier = ">=1.7.0,<2.0.0" }, { name = "scipy", marker = "python_full_version >= '3.13'", specifier = ">=1.14.1,<2.0.0" }, - { name = "sentence-transformers", specifier = ">=3.0.1,<4.0.0" }, - { name = "spacy", specifier = ">=3.8.7,<4.0.0" }, + { name = "sentence-transformers", marker = "python_full_version < '3.14'", specifier = ">=3.0.1,<4.0.0" }, + { name = "spacy", marker = "python_full_version < '3.14'", specifier = ">=3.8.7,<4.0.0" }, { name = "thinc", specifier = ">=8.3.6,<9.0.0" }, { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" }, { name = "transformers", specifier = ">=4.51.3,<5.0.0" }, @@ -832,28 +833,47 @@ wheels = [ [[package]] name = "mypy" -version = "0.991" +version = "1.18.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, + { name = "pathspec" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/5c/fbe112ca73d4c6a9e65336f48099c60800514d8949b4129c093a84a28dc8/mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06", size = 2688198, upload-time = "2022-11-14T16:59:24.952Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/d0/81d47bffc80d0cff84174aab266adc3401e735e13c5613418e825c146986/mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab", size = 18805560, upload-time = "2022-11-14T16:59:12.453Z" }, - { url = "https://files.pythonhosted.org/packages/d7/f4/dcab9f3c5ed410caca1b9374dbb2b2caa778d225e32f174e266e20291edf/mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d", size = 11117673, upload-time = "2022-11-14T16:58:46.973Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/ec/62fd00fa5d8ead3ecafed3eb99ee805911f41b11536c5940df1bcb2c845d/mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6", size = 10023879, upload-time = "2022-11-14T16:59:04.238Z" }, - { url = "https://files.pythonhosted.org/packages/39/05/7a7d58afc7d00e819e553ad2485a29141e14575e3b0c43b9da6f869ede4c/mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb", size = 18209901, upload-time = "2022-11-14T16:59:20.559Z" }, - { url = "https://files.pythonhosted.org/packages/80/23/76e56e004acca691b4da4086a8c38bd67b7ae73536848dcab76cfed5c188/mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305", size = 19680479, upload-time = "2022-11-14T16:58:18.184Z" }, - { url = "https://files.pythonhosted.org/packages/f3/1d/cc67a674f1cd7f1c10619487a4245185f6f8f14cbd685b60709318e9ac27/mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c", size = 8670519, upload-time = "2022-11-14T16:59:08.595Z" }, - { url = "https://files.pythonhosted.org/packages/b8/ab/aa2e02fce8ee8885fe98ee2a0549290e9de5caa28febc0cf243bfab020e7/mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372", size = 18600640, upload-time = "2022-11-14T16:58:42.234Z" }, - { url = "https://files.pythonhosted.org/packages/28/9c/e1805f2fea93a92671f33b00dd577119f37e4a8b859d6f6ea62d3e9129fa/mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f", size = 11004105, upload-time = "2022-11-14T16:58:13.53Z" }, - { url = "https://files.pythonhosted.org/packages/df/bb/3cf400e05e30939a0fc58b34e0662d8abe8e206464665065b56cf2ca9a62/mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33", size = 9939717, upload-time = "2022-11-14T16:58:50.894Z" }, - { url = "https://files.pythonhosted.org/packages/14/05/5a4206e269268f4aecb1096bf2375a231c959987ccf3e31313221b8bc153/mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05", size = 18068884, upload-time = "2022-11-14T16:58:24.629Z" }, - { url = "https://files.pythonhosted.org/packages/4b/98/125e5d14222de8e92f44314f8df21a9c351b531b37c551526acd67486a7d/mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad", size = 19451243, upload-time = "2022-11-14T16:59:16.468Z" }, - { url = "https://files.pythonhosted.org/packages/89/76/7159258fdbf26a5ceef100b80a82d2f79b9066725a5daeb6383a8f773910/mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297", size = 8666646, upload-time = "2022-11-14T16:58:09.231Z" }, - { url = "https://files.pythonhosted.org/packages/e7/a1/c503a15ad69ff133a76c159b8287f0eadc1f521d9796bf81f935886c98f6/mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb", size = 2307767, upload-time = "2022-11-14T16:56:33.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, + { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, + { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = 
"2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size 
= 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, ] [[package]] @@ -1104,6 +1124,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = 
"sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -1222,7 +1251,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1230,96 +1259,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size 
= 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { 
url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = 
"sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" 
}, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = "https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = "https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = 
"2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = "sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = "2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/partners/qdrant/README.md b/libs/partners/qdrant/README.md index 43dcbc0a961..d78c888d9c6 100644 --- a/libs/partners/qdrant/README.md +++ b/libs/partners/qdrant/README.md @@ -1,5 +1,22 @@ # langchain-qdrant +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-qdrant?label=%20)](https://pypi.org/project/langchain-qdrant/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-qdrant)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-qdrant)](https://pypistats.org/packages/langchain-qdrant) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-qdrant +``` + +## πŸ€” What is this? + This package contains the LangChain integration with [Qdrant](https://qdrant.tech/). +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/qdrant) for more details. 
diff --git a/libs/partners/qdrant/langchain_qdrant/fastembed_sparse.py b/libs/partners/qdrant/langchain_qdrant/fastembed_sparse.py index fc6e72a8452..8838c3ea340 100644 --- a/libs/partners/qdrant/langchain_qdrant/fastembed_sparse.py +++ b/libs/partners/qdrant/langchain_qdrant/fastembed_sparse.py @@ -28,23 +28,23 @@ class FastEmbedSparse(SparseEmbeddings): For a list of available models, see [the Qdrant docs](https://qdrant.github.io/fastembed/examples/Supported_Models/). Args: - model_name (str): The name of the model to use. Defaults to `"Qdrant/bm25"`. - batch_size (int): Batch size for encoding. Defaults to 256. + model_name (str): The name of the model to use. + batch_size (int): Batch size for encoding. cache_dir (str, optional): The path to the model cache directory.\ - Can also be set using the\ - `FASTEMBED_CACHE_PATH` env variable. + Can also be set using the\ + `FASTEMBED_CACHE_PATH` env variable. threads (int, optional): The number of threads onnxruntime session can use. providers (Sequence[Any], optional): List of ONNX execution providers.\ parallel (int, optional): If `>1`, data-parallel encoding will be used, r\ - Recommended for encoding of large datasets.\ - If `0`, use all available cores.\ - If `None`, don't use data-parallel processing,\ - use default onnxruntime threading instead.\ - Defaults to `None`. - kwargs: Additional options to pass to fastembed.SparseTextEmbedding - Raises: - ValueError: If the model_name is not supported in SparseTextEmbedding. + Recommended for encoding of large datasets.\ + If `0`, use all available cores.\ + If `None`, don't use data-parallel processing,\ + use default onnxruntime threading instead.\ + kwargs: Additional options to pass to `fastembed.SparseTextEmbedding` + + Raises: + ValueError: If the `model_name` is not supported in `SparseTextEmbedding`. """ try: from fastembed import ( # type: ignore[import-not-found] # noqa: PLC0415 diff --git a/libs/partners/qdrant/langchain_qdrant/qdrant.py b/libs/partners/qdrant/langchain_qdrant/qdrant.py index 922c4122658..c6952d6b143 100644 --- a/libs/partners/qdrant/langchain_qdrant/qdrant.py +++ b/libs/partners/qdrant/langchain_qdrant/qdrant.py @@ -10,14 +10,11 @@ from typing import ( Any, ) -import numpy as np from langchain_core.documents import Document from langchain_core.embeddings import Embeddings from langchain_core.vectorstores import VectorStore from qdrant_client import QdrantClient, models -from langchain_qdrant._utils import maximal_marginal_relevance - if TYPE_CHECKING: from collections.abc import Generator, Iterable, Sequence @@ -47,17 +44,17 @@ class QdrantVectorStore(VectorStore): ``` Key init args β€” indexing params: - collection_name: str + collection_name: Name of the collection. - embedding: Embeddings + embedding: Embedding function to use. - sparse_embedding: SparseEmbeddings + sparse_embedding: Optional sparse embedding function to use. Key init args β€” client params: - client: QdrantClient + client: Qdrant client to use. - retrieval_mode: RetrievalMode + retrieval_mode: Retrieval mode to use. Instantiate: @@ -376,6 +373,7 @@ class QdrantVectorStore(VectorStore): """Construct an instance of `QdrantVectorStore` from a list of texts. This is a user-friendly interface that: + 1. Creates embeddings, one for each text 2. Creates a Qdrant collection if it doesn't exist. 3. 
Adds the text embeddings to the Qdrant database @@ -502,10 +500,10 @@ class QdrantVectorStore(VectorStore): batch_size: int = 64, **kwargs: Any, ) -> list[str | int]: - """Add texts with embeddings to the vectorstore. + """Add texts with embeddings to the `VectorStore`. Returns: - List of ids from adding the texts into the vectorstore. + List of ids from adding the texts into the `VectorStore`. """ added_ids = [] @@ -534,7 +532,7 @@ class QdrantVectorStore(VectorStore): """Return docs most similar to query. Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ results = self.similarity_search_with_score( @@ -658,7 +656,7 @@ class QdrantVectorStore(VectorStore): """Return docs most similar to embedding vector. Returns: - List of Documents most similar to the query and distance for each. + List of `Document` objects most similar to the query and distance for each. """ qdrant_filter = filter @@ -712,7 +710,7 @@ class QdrantVectorStore(VectorStore): """Return docs most similar to embedding vector. Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ results = self.similarity_search_with_score_by_vector( @@ -745,7 +743,7 @@ class QdrantVectorStore(VectorStore): among selected documents. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ self._validate_collection_for_dense( @@ -788,7 +786,7 @@ class QdrantVectorStore(VectorStore): among selected documents. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ results = self.max_marginal_relevance_search_with_score_by_vector( @@ -822,16 +820,18 @@ class QdrantVectorStore(VectorStore): among selected documents. Returns: - List of Documents selected by maximal marginal relevance and distance for - each. - + List of `Document` objects selected by maximal marginal relevance and + distance for each. """ results = self.client.query_points( collection_name=self.collection_name, - query=embedding, + query=models.NearestQuery( + nearest=embedding, + mmr=models.Mmr(diversity=lambda_mult, candidates_limit=fetch_k), + ), query_filter=filter, search_params=search_params, - limit=fetch_k, + limit=k, with_payload=True, with_vectors=True, score_threshold=score_threshold, @@ -840,26 +840,17 @@ class QdrantVectorStore(VectorStore): **kwargs, ).points - embeddings = [ - result.vector - if isinstance(result.vector, list) - else result.vector.get(self.vector_name) # type: ignore[union-attr] - for result in results - ] - mmr_selected = maximal_marginal_relevance( - np.array(embedding), embeddings, k=k, lambda_mult=lambda_mult - ) return [ ( self._document_from_point( - results[i], + result, self.collection_name, self.content_payload_key, self.metadata_payload_key, ), - results[i].score, + result.score, ) - for i in mmr_selected + for result in results ] def delete( # type: ignore[override] @@ -874,7 +865,7 @@ class QdrantVectorStore(VectorStore): **kwargs: Other keyword arguments that subclasses might use. Returns: - True if deletion is successful, False otherwise. + True if deletion is successful, `False` otherwise. 
""" result = self.client.delete( diff --git a/libs/partners/qdrant/langchain_qdrant/vectorstores.py b/libs/partners/qdrant/langchain_qdrant/vectorstores.py index 47f6c61e06c..95f5500a132 100644 --- a/libs/partners/qdrant/langchain_qdrant/vectorstores.py +++ b/libs/partners/qdrant/langchain_qdrant/vectorstores.py @@ -151,10 +151,10 @@ class Qdrant(VectorStore): batch_size: int = 64, **kwargs: Any, ) -> list[str]: - """Run more texts through the embeddings and add to the vectorstore. + """Run more texts through the embeddings and add to the `VectorStore`. Args: - texts: Iterable of strings to add to the vectorstore. + texts: Iterable of strings to add to the `VectorStore`. metadatas: Optional list of metadatas associated with the texts. ids: Optional list of ids to associate with the texts. Ids have to be @@ -165,7 +165,7 @@ class Qdrant(VectorStore): **kwargs: Additional keyword arguments. Returns: - List of ids from adding the texts into the vectorstore. + List of ids from adding the texts into the `VectorStore`. """ added_ids = [] @@ -188,10 +188,10 @@ class Qdrant(VectorStore): batch_size: int = 64, **kwargs: Any, ) -> list[str]: - """Run more texts through the embeddings and add to the vectorstore. + """Run more texts through the embeddings and add to the `VectorStore`. Args: - texts: Iterable of strings to add to the vectorstore. + texts: Iterable of strings to add to the `VectorStore`. metadatas: Optional list of metadatas associated with the texts. ids: Optional list of ids to associate with the texts. Ids have to be @@ -202,7 +202,7 @@ class Qdrant(VectorStore): **kwargs: Additional keyword arguments. Returns: - List of ids from adding the texts into the vectorstore. + List of ids from adding the texts into the `VectorStore`. """ if self.async_client is None or isinstance( @@ -237,8 +237,8 @@ class Qdrant(VectorStore): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. search_params: Additional search params offset: Offset of the first result to return. @@ -265,7 +265,7 @@ class Qdrant(VectorStore): Any other named arguments to pass through to QdrantClient.search() Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ results = self.similarity_search_with_score( @@ -292,12 +292,12 @@ class Qdrant(VectorStore): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. **kwargs: Additional keyword arguments. Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ results = await self.asimilarity_search_with_score(query, k, filter, **kwargs) @@ -318,8 +318,8 @@ class Qdrant(VectorStore): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. search_params: Additional search params offset: Offset of the first result to return. @@ -376,8 +376,8 @@ class Qdrant(VectorStore): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. 
search_params: Additional search params offset: Offset of the first result to return. @@ -435,8 +435,8 @@ class Qdrant(VectorStore): Args: embedding: Embedding vector to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. search_params: Additional search params offset: Offset of the first result to return. @@ -463,7 +463,7 @@ class Qdrant(VectorStore): Any other named arguments to pass through to QdrantClient.search() Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ results = self.similarity_search_with_score_by_vector( @@ -494,8 +494,8 @@ class Qdrant(VectorStore): Args: embedding: Embedding vector to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. search_params: Additional search params offset: Offset of the first result to return. @@ -523,7 +523,7 @@ class Qdrant(VectorStore): AsyncQdrantClient.Search(). Returns: - List of Documents most similar to the query. + List of `Document` objects most similar to the query. """ results = await self.asimilarity_search_with_score_by_vector( @@ -553,8 +553,8 @@ class Qdrant(VectorStore): Args: embedding: Embedding vector to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. search_params: Additional search params offset: Offset of the first result to return. @@ -642,8 +642,8 @@ class Qdrant(VectorStore): Args: embedding: Embedding vector to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter: Filter by metadata. Defaults to `None`. + k: Number of Documents to return. + filter: Filter by metadata. search_params: Additional search params offset: Offset of the first result to return. @@ -740,12 +740,12 @@ class Qdrant(VectorStore): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of Documents to return. fetch_k: Number of Documents to fetch to pass to MMR algorithm. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding to maximum diversity - and 1 to minimum diversity. - filter: Filter by metadata. Defaults to `None`. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding to maximum + diversity and `1` to minimum diversity. + filter: Filter by metadata. search_params: Additional search params score_threshold: Define a minimal score threshold for the result. @@ -768,7 +768,7 @@ class Qdrant(VectorStore): Any other named arguments to pass through to QdrantClient.search() Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ query_embedding = self._embed_query(query) @@ -804,13 +804,12 @@ class Qdrant(VectorStore): Args: query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of Documents to return. fetch_k: Number of Documents to fetch to pass to MMR algorithm. 
- lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter: Filter by metadata. Defaults to `None`. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. + filter: Filter by metadata. search_params: Additional search params score_threshold: Define a minimal score threshold for the result. @@ -822,19 +821,20 @@ class Qdrant(VectorStore): Read consistency of the search. Defines how many replicas should be queried before returning the result. Values: - - int - number of replicas to query, values should present in all + - `int` - number of replicas to query, values should present in all queried replicas - - 'majority' - query all replicas, but return values present in the + - `'majority'` - query all replicas, but return values present in the majority of replicas - - 'quorum' - query the majority of replicas, return values present in + - `'quorum'` - query the majority of replicas, return values present in all of them - - 'all' - query all replicas, and return values present in all replicas + - `'all'` - query all replicas, and return values present in all + replicas **kwargs: Any other named arguments to pass through to - AsyncQdrantClient.Search(). + `AsyncQdrantClient.Search()`. Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ query_embedding = await self._aembed_query(query) @@ -869,36 +869,36 @@ class Qdrant(VectorStore): Args: embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of Documents to return. fetch_k: Number of Documents to fetch to pass to MMR algorithm. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter: Filter by metadata. Defaults to `None`. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. + filter: Filter by metadata. search_params: Additional search params score_threshold: Define a minimal score threshold for the result. If defined, less similar results will not be returned. Score of the returned result might be higher or smaller than the threshold depending on the Distance function used. - E.g. for cosine similarity only higher scores will be returned. + e.g. for cosine similarity only higher scores will be returned. consistency: Read consistency of the search. Defines how many replicas should be queried before returning the result. 
Values: - - int - number of replicas to query, values should present in all + - `int` - number of replicas to query, values should present in all queried replicas - - 'majority' - query all replicas, but return values present in the + - `'majority'` - query all replicas, but return values present in the majority of replicas - - 'quorum' - query the majority of replicas, return values present in + - `'quorum'` - query the majority of replicas, return values present in all of them - - 'all' - query all replicas, and return values present in all replicas + - `'all'` - query all replicas, and return values present in all + replicas **kwargs: - Any other named arguments to pass through to QdrantClient.search() + Any other named arguments to pass through to `QdrantClient.search()` Returns: - List of Documents selected by maximal marginal relevance. + List of `Document` objects selected by maximal marginal relevance. """ results = self.max_marginal_relevance_search_with_score_by_vector( @@ -934,13 +934,12 @@ class Qdrant(VectorStore): Args: embedding: Embedding vector to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - fetch_k: Number of Documents to fetch to pass to MMR algorithm. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter: Filter by metadata. Defaults to `None`. + k: Number of `Document` objects to return. + fetch_k: Number of `Document` to fetch to pass to MMR algorithm. + lambda_mult: Number between `0` and `1` that determines the degree + of diversity among the results with `0` corresponding + to maximum diversity and `1` to minimum diversity. + filter: Filter by metadata. search_params: Additional search params score_threshold: Define a minimal score threshold for the result. @@ -952,20 +951,21 @@ class Qdrant(VectorStore): Read consistency of the search. Defines how many replicas should be queried before returning the result. Values: - - int - number of replicas to query, values should present in all + - `int` - number of replicas to query, values should present in all queried replicas - - 'majority' - query all replicas, but return values present in the - majority of replicas - - 'quorum' - query the majority of replicas, return values present in - all of them - - 'all' - query all replicas, and return values present in all replicas + - `'majority'` - query all replicas, but return values present in the + majority of replicas + - `'quorum'` - query the majority of replicas, return values present in + all of them + - `'all'` - query all replicas, and return values present in all + replicas **kwargs: Any other named arguments to pass through to - AsyncQdrantClient.Search(). + `AsyncQdrantClient.Search()`. Returns: - List of Documents selected by maximal marginal relevance and distance for - each. + List of `Document` objects selected by maximal marginal relevance and + distance for each. """ results = await self.amax_marginal_relevance_search_with_score_by_vector( @@ -1000,14 +1000,12 @@ class Qdrant(VectorStore): Args: embedding: Embedding vector to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of Documents to return. fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Defaults to 20. 
- lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter: Filter by metadata. Defaults to `None`. + lambda_mult: Number between `0` and `1` that determines the degree of + diversity among the results with `0` corresponding to maximum diversity + and `1` to minimum diversity. + filter: Filter by metadata. search_params: Additional search params score_threshold: Define a minimal score threshold for the result. @@ -1022,17 +1020,16 @@ class Qdrant(VectorStore): - int - number of replicas to query, values should present in all queried replicas - 'majority' - query all replicas, but return values present in the - majority of replicas + majority of replicas - 'quorum' - query the majority of replicas, return values present in - all of them + all of them - 'all' - query all replicas, and return values present in all replicas **kwargs: Any other named arguments to pass through to QdrantClient.search() Returns: - List of Documents selected by maximal marginal relevance and distance for - each. - + List of `Document` objects selected by maximal marginal relevance and + distance for each. """ query_vector = embedding if self.vector_name is not None: @@ -1092,23 +1089,20 @@ class Qdrant(VectorStore): Args: embedding: Embedding vector to look up documents similar to. - k: Number of Documents to return. Defaults to 4. + k: Number of Documents to return. fetch_k: Number of Documents to fetch to pass to MMR algorithm. - Defaults to 20. - lambda_mult: Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter: Filter by metadata. Defaults to `None`. + lambda_mult: Number between `0` and `1` that determines the degree of + diversity among the results with `0` corresponding to maximum diversity + and `1` to minimum diversity. + filter: Filter by metadata. search_params: Additional search params. score_threshold: Define a minimal score threshold for the result. consistency: Read consistency of the search. **kwargs: Additional keyword arguments. Returns: - List of Documents selected by maximal marginal relevance and distance for - each. - + List of `Document` objects selected by maximal marginal relevance and + distance for each. """ if self.async_client is None or isinstance( self.async_client._client, AsyncQdrantLocal @@ -1161,7 +1155,7 @@ class Qdrant(VectorStore): **kwargs: Other keyword arguments that subclasses might use. Returns: - True if deletion is successful, False otherwise. + True if deletion is successful, `False` otherwise. """ result = self.client.delete( @@ -1179,7 +1173,7 @@ class Qdrant(VectorStore): **kwargs: Other keyword arguments that subclasses might use. Returns: - True if deletion is successful, False otherwise. + True if deletion is successful, `False` otherwise. """ if self.async_client is None or isinstance( @@ -1324,9 +1318,10 @@ class Qdrant(VectorStore): Additional arguments passed directly into REST client initialization This is a user-friendly interface that: + 1. Creates embeddings, one for each text 2. Initializes the Qdrant database as an in-memory docstore by default - (and overridable to a remote docstore) + (and overridable to a remote docstore) 3. Adds the text embeddings to the Qdrant database This is intended to be a quick way to get started. 
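# A minimal sketch (not from this patch) of the "quick way to get started" flow the
# from_texts docstring above describes. DeterministicFakeEmbedding stands in for a real
# embedding model; the texts and collection name are arbitrary examples.
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_qdrant import Qdrant

embeddings = DeterministicFakeEmbedding(size=128)

# from_texts embeds each text, initializes an in-memory Qdrant instance
# (overridable to a remote server via url=...), and upserts the vectors.
qdrant = Qdrant.from_texts(
    ["qdrant stores vectors", "langchain wires up retrieval", "mmr adds diversity"],
    embeddings,
    location=":memory:",
    collection_name="demo_collection",
)

print(qdrant.similarity_search("vector database", k=2))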
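# A rough sketch (not from this patch) of the change in the qdrant.py hunk earlier in this
# diff, which drops client-side maximal_marginal_relevance re-ranking in favor of Qdrant's
# native MMR query. The collection, vectors, and parameter values here are illustrative,
# and MMR support in query_points assumes qdrant-client>=1.15.1 (the new lower bound in
# pyproject.toml).
from qdrant_client import QdrantClient, models

client = QdrantClient(":memory:")
client.create_collection(
    collection_name="mmr_demo",
    vectors_config=models.VectorParams(size=4, distance=models.Distance.COSINE),
)
client.upsert(
    collection_name="mmr_demo",
    points=[
        models.PointStruct(id=i, vector=[float(i), 1.0, 0.0, 0.5], payload={"page_content": f"doc {i}"})
        for i in range(5)
    ],
)

points = client.query_points(
    collection_name="mmr_demo",
    # NearestQuery + Mmr asks the server to balance relevance against diversity,
    # so the caller no longer fetches fetch_k raw vectors and re-ranks them locally.
    query=models.NearestQuery(
        nearest=[1.0, 1.0, 0.0, 0.5],
        mmr=models.Mmr(diversity=0.5, candidates_limit=20),
    ),
    limit=2,
    with_payload=True,
).points

for point in points:
    print(point.id, point.score, point.payload)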
@@ -1564,6 +1559,7 @@ class Qdrant(VectorStore): Additional arguments passed directly into REST client initialization This is a user-friendly interface that: + 1. Creates embeddings, one for each text 2. Initializes the Qdrant database as an in-memory docstore by default (and overridable to a remote docstore) 3. Adds the text embeddings to the Qdrant database @@ -1977,19 +1973,19 @@ class Qdrant(VectorStore): k: int = 4, **kwargs: Any, ) -> list[tuple[Document, float]]: - """Return docs and relevance scores in the range [0, 1]. + """Return docs and relevance scores in the range `[0, 1]`. - 0 is dissimilar, 1 is most similar. + `0` is dissimilar, `1` is most similar. Args: query: input text - k: Number of Documents to return. Defaults to 4. - **kwargs: kwargs to be passed to similarity search. Should include: - score_threshold: Optional, a floating point value between 0 to 1 to - filter the resulting set of retrieved docs + k: Number of Documents to return. + **kwargs: kwargs to be passed to similarity search. Should include + `score_threshold`, an optional floating point value between `0` and `1` + to filter the resulting set of retrieved docs Returns: - List of Tuples of (doc, similarity_score) + List of tuples of `(doc, similarity_score)` """ return self.similarity_search_with_score(query, k, **kwargs) @@ -2001,19 +1997,19 @@ class Qdrant(VectorStore): k: int = 4, **kwargs: Any, ) -> list[tuple[Document, float]]: - """Return docs and relevance scores in the range [0, 1]. + """Return docs and relevance scores in the range `[0, 1]`. - 0 is dissimilar, 1 is most similar. + `0` is dissimilar, `1` is most similar. Args: query: input text - k: Number of Documents to return. Defaults to 4. - **kwargs: kwargs to be passed to similarity search. Should include: - score_threshold: Optional, a floating point value between 0 to 1 to - filter the resulting set of retrieved docs + k: Number of Documents to return. + **kwargs: kwargs to be passed to similarity search.
Should include + `score_threshold`, an optional floating point value between `0` and `1` + to filter the resulting set of retrieved docs Returns: - List of Tuples of (doc, similarity_score) + List of tuples of `(doc, similarity_score)` """ return await self.asimilarity_search_with_score(query, k, **kwargs) diff --git a/libs/partners/qdrant/pyproject.toml b/libs/partners/qdrant/pyproject.toml index 3985fcc99cb..bcaad52421b 100644 --- a/libs/partners/qdrant/pyproject.toml +++ b/libs/partners/qdrant/pyproject.toml @@ -7,23 +7,23 @@ authors = [] license = { text = "MIT" } requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "qdrant-client>=1.10.1,<2.0.0", + "qdrant-client>=1.15.1,<2.0.0", "pydantic>=2.7.4,<3.0.0", - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", ] name = "langchain-qdrant" -version = "1.0.0a1" +version = "1.1.0" description = "An integration package connecting Qdrant and LangChain" readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/qdrant" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/qdrant" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-qdrant%22" -docs = "https://reference.langchain.com/python/integrations/langchain_qdrant/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/qdrant" +Documentation = "https://reference.langchain.com/python/integrations/langchain_qdrant/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/qdrant" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-qdrant%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [project.optional-dependencies] fastembed = [ diff --git a/libs/partners/qdrant/tests/integration_tests/qdrant_vector_store/test_mmr.py b/libs/partners/qdrant/tests/integration_tests/qdrant_vector_store/test_mmr.py index 8e96fc70993..650bf45b4f7 100644 --- a/libs/partners/qdrant/tests/integration_tests/qdrant_vector_store/test_mmr.py +++ b/libs/partners/qdrant/tests/integration_tests/qdrant_vector_store/test_mmr.py @@ -63,7 +63,7 @@ def test_qdrant_mmr_search( output, [ Document(page_content="foo", metadata={"page": 0}), - Document(page_content="baz", metadata={"page": 2}), + Document(page_content="bar", metadata={"page": 1}), ], ) diff --git a/libs/partners/qdrant/uv.lock b/libs/partners/qdrant/uv.lock index 48008a243d5..ced258758f3 100644 --- a/libs/partners/qdrant/uv.lock +++ b/libs/partners/qdrant/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10.0, <4.0.0" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -506,7 +506,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -564,7 +564,7 @@ typing = [ [[package]] name = "langchain-qdrant" -version = "1.0.0a1" +version = "1.1.0" source = { editable = "."
} dependencies = [ { name = "langchain-core" }, @@ -608,7 +608,7 @@ requires-dist = [ { name = "fastembed", marker = "python_full_version >= '3.9' and python_full_version < '3.13' and extra == 'fastembed'", specifier = ">=0.3.3,<1.0.0" }, { name = "langchain-core", editable = "../../core" }, { name = "pydantic", specifier = ">=2.7.4,<3.0.0" }, - { name = "qdrant-client", specifier = ">=1.10.1,<2.0.0" }, + { name = "qdrant-client", specifier = ">=1.15.1,<2.0.0" }, ] provides-extras = ["fastembed"] @@ -637,7 +637,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -1595,7 +1595,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1603,96 +1603,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, 
upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = 
"2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = 
"2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, 
upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = 
"sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" 
}, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = "https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = "https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = 
"2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = "sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = "2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/partners/xai/README.md b/libs/partners/xai/README.md index e6f8ebbbe10..872a5b6b7eb 100644 --- a/libs/partners/xai/README.md +++ b/libs/partners/xai/README.md @@ -1,5 +1,22 @@ # langchain-xai +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-xai?label=%20)](https://pypi.org/project/langchain-xai/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-xai)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-xai)](https://pypistats.org/packages/langchain-xai) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). + +## Quick Install + +```bash +pip install langchain-xai +``` + +## πŸ€” What is this? + This package contains the LangChain integrations for [xAI](https://x.ai/) through their [APIs](https://console.x.ai). +## πŸ“– Documentation + View the [documentation](https://docs.langchain.com/oss/python/integrations/providers/xai) for more details. 
diff --git a/libs/partners/xai/langchain_xai/chat_models.py b/libs/partners/xai/langchain_xai/chat_models.py index 2f0224bfa21..c407bbfd730 100644 --- a/libs/partners/xai/langchain_xai/chat_models.py +++ b/libs/partners/xai/langchain_xai/chat_models.py @@ -38,31 +38,31 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] ``` Key init args β€” completion params: - model: str + model: Name of model to use. - temperature: float + temperature: Sampling temperature between `0` and `2`. Higher values mean more random completions, while lower values (like `0.2`) mean more focused and deterministic completions. (Default: `1`.) - max_tokens: int | None + max_tokens: Max number of tokens to generate. Refer to your [model's documentation](https://docs.x.ai/docs/models#model-pricing) for the maximum number of tokens it can generate. - logprobs: bool | None + logprobs: Whether to return logprobs. Key init args β€” client params: - timeout: Union[float, Tuple[float, float], Any, None] + timeout: Timeout for requests. - max_retries: int + max_retries: Max number of retries. - api_key: str | None + api_key: xAI API key. If not passed in will be read from env var `XAI_API_KEY`. Instantiate: ```python from langchain_xai import ChatXAI - llm = ChatXAI( + model = ChatXAI( model="grok-4", temperature=0, max_tokens=None, @@ -82,7 +82,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] ), ("human", "I love programming."), ] - llm.invoke(messages) + model.invoke(messages) ``` ```python @@ -110,7 +110,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] Stream: ```python - for chunk in llm.stream(messages): + for chunk in model.stream(messages): print(chunk.text, end="") ``` @@ -129,13 +129,13 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] Async: ```python - await llm.ainvoke(messages) + await model.ainvoke(messages) # stream: - # async for chunk in (await llm.astream(messages)) + # async for chunk in (await model.astream(messages)) # batch: - # await llm.abatch([messages]) + # await model.abatch([messages]) ``` ```python @@ -166,7 +166,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] which allows the model to provide reasoning content along with the response. If provided, reasoning content is returned under the `additional_kwargs` field of the - AIMessage or AIMessageChunk. + `AIMessage` or `AIMessageChunk`. If supported, reasoning effort can be specified in the model constructor's `extra_body` argument, which will control the amount of reasoning the model does. The value can be one of @@ -189,105 +189,106 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] reasoning cannot be disabled, and the `reasoning_effort` cannot be specified. Tool calling / function calling: - ```python - from pydantic import BaseModel, Field - llm = ChatXAI(model="grok-4") + ```python + from pydantic import BaseModel, Field + + model = ChatXAI(model="grok-4") - class GetWeather(BaseModel): - '''Get the current weather in a given location''' + class GetWeather(BaseModel): + '''Get the current weather in a given location''' - location: str = Field(..., description="The city and state, e.g. San Francisco, CA") + location: str = Field(..., description="The city and state, e.g. San Francisco, CA") - class GetPopulation(BaseModel): - '''Get the current population in a given location''' + class GetPopulation(BaseModel): + '''Get the current population in a given location''' - location: str = Field(..., description="The city and state, e.g. 
San Francisco, CA") + location: str = Field(..., description="The city and state, e.g. San Francisco, CA") - llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) - ai_msg = llm_with_tools.invoke("Which city is bigger: LA or NY?") - ai_msg.tool_calls - ``` + model_with_tools = model.bind_tools([GetWeather, GetPopulation]) + ai_msg = model_with_tools.invoke("Which city is bigger: LA or NY?") + ai_msg.tool_calls + ``` - ```python - [ - { - "name": "GetPopulation", - "args": {"location": "NY"}, - "id": "call_m5tstyn2004pre9bfuxvom8x", - "type": "tool_call", - }, - { - "name": "GetPopulation", - "args": {"location": "LA"}, - "id": "call_0vjgq455gq1av5sp9eb1pw6a", - "type": "tool_call", - }, - ] - ``` + ```python + [ + { + "name": "GetPopulation", + "args": {"location": "NY"}, + "id": "call_m5tstyn2004pre9bfuxvom8x", + "type": "tool_call", + }, + { + "name": "GetPopulation", + "args": {"location": "LA"}, + "id": "call_0vjgq455gq1av5sp9eb1pw6a", + "type": "tool_call", + }, + ] + ``` - !!! note - With stream response, the tool / function call will be returned in whole in a - single chunk, instead of being streamed across chunks. + !!! note + With stream response, the tool / function call will be returned in whole in a + single chunk, instead of being streamed across chunks. - Tool choice can be controlled by setting the `tool_choice` parameter in the model - constructor's `extra_body` argument. For example, to disable tool / function calling: + Tool choice can be controlled by setting the `tool_choice` parameter in the model + constructor's `extra_body` argument. For example, to disable tool / function calling: - ```python - llm = ChatXAI(model="grok-4", extra_body={"tool_choice": "none"}) - ``` - To require that the model always calls a tool / function, set `tool_choice` to `'required'`: + ```python + model = ChatXAI(model="grok-4", extra_body={"tool_choice": "none"}) + ``` + To require that the model always calls a tool / function, set `tool_choice` to `'required'`: - ```python - llm = ChatXAI(model="grok-4", extra_body={"tool_choice": "required"}) - ``` + ```python + model = ChatXAI(model="grok-4", extra_body={"tool_choice": "required"}) + ``` - To specify a tool / function to call, set `tool_choice` to the name of the tool / function: + To specify a tool / function to call, set `tool_choice` to the name of the tool / function: - ```python - from pydantic import BaseModel, Field + ```python + from pydantic import BaseModel, Field - llm = ChatXAI( - model="grok-4", - extra_body={ - "tool_choice": {"type": "function", "function": {"name": "GetWeather"}} - }, - ) + model = ChatXAI( + model="grok-4", + extra_body={ + "tool_choice": {"type": "function", "function": {"name": "GetWeather"}} + }, + ) - class GetWeather(BaseModel): - \"\"\"Get the current weather in a given location\"\"\" + class GetWeather(BaseModel): + \"\"\"Get the current weather in a given location\"\"\" - location: str = Field(..., description='The city and state, e.g. San Francisco, CA') + location: str = Field(..., description='The city and state, e.g. San Francisco, CA') - class GetPopulation(BaseModel): - \"\"\"Get the current population in a given location\"\"\" + class GetPopulation(BaseModel): + \"\"\"Get the current population in a given location\"\"\" - location: str = Field(..., description='The city and state, e.g. San Francisco, CA') + location: str = Field(..., description='The city and state, e.g. 
San Francisco, CA') - llm_with_tools = llm.bind_tools([GetWeather, GetPopulation]) - ai_msg = llm_with_tools.invoke( - "Which city is bigger: LA or NY?", - ) - ai_msg.tool_calls - ``` + model_with_tools = model.bind_tools([GetWeather, GetPopulation]) + ai_msg = model_with_tools.invoke( + "Which city is bigger: LA or NY?", + ) + ai_msg.tool_calls + ``` - The resulting tool call would be: + The resulting tool call would be: - ```python - [ - { - "name": "GetWeather", - "args": {"location": "Los Angeles, CA"}, - "id": "call_81668711", - "type": "tool_call", - } - ] - ``` + ```python + [ + { + "name": "GetWeather", + "args": {"location": "Los Angeles, CA"}, + "id": "call_81668711", + "type": "tool_call", + } + ] + ``` Parallel tool calling / parallel function calling: By default, parallel tool / function calling is enabled, so you can process @@ -309,8 +310,8 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] rating: int | None = Field(description="How funny the joke is, from 1 to 10") - structured_llm = llm.with_structured_output(Joke) - structured_llm.invoke("Tell me a joke about cats") + structured_model = model.with_structured_output(Joke) + structured_model.invoke("Tell me a joke about cats") ``` ```python @@ -328,7 +329,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] ```python from langchain_xai import ChatXAI - llm = ChatXAI( + model = ChatXAI( model="grok-4", search_parameters={ "mode": "auto", @@ -339,7 +340,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] }, ) - llm.invoke("Provide me a digest of world news in the last 24 hours.") + model.invoke("Provide me a digest of world news in the last 24 hours.") ``` !!! note @@ -348,7 +349,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] Token usage: ```python - ai_msg = llm.invoke(messages) + ai_msg = model.invoke(messages) ai_msg.usage_metadata ``` @@ -358,9 +359,9 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] Logprobs: ```python - logprobs_llm = llm.bind(logprobs=True) + logprobs_model = model.bind(logprobs=True) messages = [("human", "Say Hello World! Do not return anything else.")] - ai_msg = logprobs_llm.invoke(messages) + ai_msg = logprobs_model.invoke(messages) ai_msg.response_metadata["logprobs"] ``` @@ -373,25 +374,26 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] } ``` - Response metadata - ```python - ai_msg = llm.invoke(messages) - ai_msg.response_metadata - ``` + Response metadata: - ```python - { - "token_usage": { - "completion_tokens": 4, - "prompt_tokens": 19, - "total_tokens": 23, - }, - "model_name": "grok-4", - "system_fingerprint": None, - "finish_reason": "stop", - "logprobs": None, - } - ``` + ```python + ai_msg = model.invoke(messages) + ai_msg.response_metadata + ``` + + ```python + { + "token_usage": { + "completion_tokens": 4, + "prompt_tokens": 19, + "total_tokens": 23, + }, + "model_name": "grok-4", + "system_fingerprint": None, + "finish_reason": "stop", + "logprobs": None, + } + ``` """ # noqa: E501 model_name: str = Field(default="grok-4", alias="model") @@ -426,7 +428,11 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] @classmethod def get_lc_namespace(cls) -> list[str]: - """Get the namespace of the langchain object.""" + """Get the namespace of the LangChain object. 
+ + Returns: + `["langchain_xai", "chat_models"]` + """ return ["langchain_xai", "chat_models"] @property @@ -527,6 +533,9 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] ) -> ChatResult: rtn = super()._create_chat_result(response, generation_info) + for generation in rtn.generations: + generation.message.response_metadata["model_provider"] = "xai" + if not isinstance(response, openai.BaseModel): return rtn @@ -553,6 +562,10 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] default_chunk_class, base_generation_info, ) + + if generation_chunk: + generation_chunk.message.response_metadata["model_provider"] = "xai" + if (choices := chunk.get("choices")) and generation_chunk: top = choices[0] if isinstance(generation_chunk.message, AIMessageChunk) and ( @@ -587,17 +600,19 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] Args: schema: The output schema. Can be passed in as: - - an OpenAI function/tool schema, - - a JSON Schema, - - a `TypedDict` class (support added in 0.1.20), - - or a Pydantic class. + - An OpenAI function/tool schema, + - A JSON Schema, + - A `TypedDict` class, + - Or a Pydantic class. If `schema` is a Pydantic class then the model output will be a Pydantic instance of that class, and the model-generated fields will be validated by the Pydantic class. Otherwise the model output will be a - dict and will not be validated. See `langchain_core.utils.function_calling.convert_to_openai_tool` - for more on how to properly specify types and descriptions of - schema fields when specifying a Pydantic or `TypedDict` class. + dict and will not be validated. + + See `langchain_core.utils.function_calling.convert_to_openai_tool` for + more on how to properly specify types and descriptions of schema fields + when specifying a Pydantic or `TypedDict` class. method: The method for steering model generation, one of: @@ -609,13 +624,18 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] Uses xAI's JSON mode feature. include_raw: - If `False` then only the parsed structured output is returned. If - an error occurs during model output parsing it will be raised. If `True` - then both the raw model response (a BaseMessage) and the parsed model - response will be returned. If an error occurs during output parsing it - will be caught and returned as well. The final output is always a dict - with keys `'raw'`, `'parsed'`, and `'parsing_error'`. + If `False` then only the parsed structured output is returned. + If an error occurs during model output parsing it will be raised. + + If `True` then both the raw model response (a `BaseMessage`) and the + parsed model response will be returned. + + If an error occurs during output parsing it will be caught and returned + as well. + + The final output is always a `dict` with keys `'raw'`, `'parsed'`, and + `'parsing_error'`. strict: - `True`: Model output is guaranteed to exactly match the schema. @@ -629,17 +649,19 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override] kwargs: Additional keyword args aren't supported. Returns: - A Runnable that takes same inputs as a `langchain_core.language_models.chat.BaseChatModel`. + A `Runnable` that takes same inputs as a + `langchain_core.language_models.chat.BaseChatModel`. If `include_raw` is + `False` and `schema` is a Pydantic class, `Runnable` outputs an instance + of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is + `False` then `Runnable` outputs a `dict`. 
- If `include_raw` is `False` and `schema` is a Pydantic class, Runnable outputs an instance of `schema` (i.e., a Pydantic object). Otherwise, if `include_raw` is `False` then Runnable outputs a dict. + If `include_raw` is `True`, then `Runnable` outputs a `dict` with keys: - If `include_raw` is `True`, then Runnable outputs a dict with keys: - - - `'raw'`: BaseMessage - - `'parsed'`: None if there was a parsing error, otherwise the type depends on the `schema` as described above. - - `'parsing_error'`: BaseException | None - - """ # noqa: E501 + - `'raw'`: `BaseMessage` + - `'parsed'`: `None` if there was a parsing error, otherwise the type + depends on the `schema` as described above. + - `'parsing_error'`: `BaseException | None` + """ # Some applications require that incompatible parameters (e.g., unsupported # methods) be handled. if method == "function_calling" and strict: diff --git a/libs/partners/xai/pyproject.toml b/libs/partners/xai/pyproject.toml index e4ffa34a66a..bbe4682fa40 100644 --- a/libs/partners/xai/pyproject.toml +++ b/libs/partners/xai/pyproject.toml @@ -3,28 +3,29 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [] +name = "langchain-xai" +description = "An integration package connecting xAI and LangChain" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.0" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-openai>=1.0.0a3,<2.0.0", - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-openai>=1.0.0,<2.0.0", + "langchain-core>=1.0.0,<2.0.0", "requests>=2.0.0,<3.0.0", "aiohttp>=3.9.1,<4.0.0", ] -name = "langchain-xai" -version = "1.0.0a1" -description = "An integration package connecting xAI and LangChain" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/oss/python/integrations/providers/xai" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/xai" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-xai%22" -docs = "https://reference.langchain.com/python/integrations/langchain_xai/" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/oss/python/integrations/providers/xai" +Documentation = "https://reference.langchain.com/python/integrations/langchain_xai/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/xai" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-xai%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = [ diff --git a/libs/partners/xai/tests/integration_tests/test_chat_models.py b/libs/partners/xai/tests/integration_tests/test_chat_models.py new file mode 100644 index 00000000000..72d17fc9d19 --- /dev/null +++ b/libs/partners/xai/tests/integration_tests/test_chat_models.py @@ -0,0 +1,97 @@ +"""Integration tests for ChatXAI specific features.""" + +from __future__ import annotations + +from typing import Literal + +import pytest +from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessageChunk + +from langchain_xai import ChatXAI + +MODEL_NAME = "grok-4-fast-reasoning" + + +@pytest.mark.parametrize("output_version", ["", "v1"]) +def test_reasoning(output_version: Literal["", "v1"]) -> None: + """Test reasoning features. + + !!! 
note + `grok-4` does not return `reasoning_content`, but may optionally return + encrypted reasoning content if `use_encrypted_content` is set to `True`. + """ + # Test reasoning effort + if output_version: + chat_model = ChatXAI( + model="grok-3-mini", + reasoning_effort="low", + output_version=output_version, + ) + else: + chat_model = ChatXAI( + model="grok-3-mini", + reasoning_effort="low", + ) + input_message = "What is 3^3?" + response = chat_model.invoke(input_message) + assert response.content + assert response.additional_kwargs["reasoning_content"] + + # Test streaming + full: BaseMessageChunk | None = None + for chunk in chat_model.stream(input_message): + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + assert full.additional_kwargs["reasoning_content"] + + # Check that we can access reasoning content blocks + assert response.content_blocks + reasoning_content = ( + block for block in response.content_blocks if block["type"] == "reasoning" + ) + assert len(list(reasoning_content)) >= 1 + + # Test that passing message with reasoning back in works + follow_up_message = "Based on your reasoning, what is 4^4?" + followup = chat_model.invoke([input_message, response, follow_up_message]) + assert followup.content + assert followup.additional_kwargs["reasoning_content"] + followup_reasoning = ( + block for block in followup.content_blocks if block["type"] == "reasoning" + ) + assert len(list(followup_reasoning)) >= 1 + + # Test passing in a ReasoningContentBlock + response_metadata = {"model_provider": "xai"} + if output_version: + response_metadata["output_version"] = output_version + msg_w_reasoning = AIMessage( + content_blocks=response.content_blocks, + response_metadata=response_metadata, + ) + followup_2 = chat_model.invoke( + [msg_w_reasoning, "Based on your reasoning, what is 5^5?"] + ) + assert followup_2.content + assert followup_2.additional_kwargs["reasoning_content"] + + +def test_web_search() -> None: + llm = ChatXAI( + model=MODEL_NAME, + search_parameters={"mode": "on", "max_search_results": 3}, + ) + + # Test invoke + response = llm.invoke("Provide me a digest of world news in the last 24 hours.") + assert response.content + assert response.additional_kwargs["citations"] + assert len(response.additional_kwargs["citations"]) <= 3 + + # Test streaming + full = None + for chunk in llm.stream("Provide me a digest of world news in the last 24 hours."): + full = chunk if full is None else full + chunk + assert isinstance(full, AIMessageChunk) + assert full.additional_kwargs["citations"] + assert len(full.additional_kwargs["citations"]) <= 3 diff --git a/libs/partners/xai/tests/integration_tests/test_chat_models_standard.py b/libs/partners/xai/tests/integration_tests/test_chat_models_standard.py index dada7d5c4bc..9b7871d6ac2 100644 --- a/libs/partners/xai/tests/integration_tests/test_chat_models_standard.py +++ b/libs/partners/xai/tests/integration_tests/test_chat_models_standard.py @@ -4,26 +4,25 @@ from __future__ import annotations from typing import TYPE_CHECKING -from langchain_core.messages import AIMessageChunk, BaseMessageChunk +import pytest +from langchain_core.messages import AIMessage from langchain_core.rate_limiters import InMemoryRateLimiter from langchain_tests.integration_tests import ( # type: ignore[import-not-found] ChatModelIntegrationTests, # type: ignore[import-not-found] ) +from typing_extensions import override from langchain_xai import ChatXAI if TYPE_CHECKING: from langchain_core.language_models import 
BaseChatModel -# Initialize the rate limiter in global scope, so it can be re-used -# across tests. +# Initialize the rate limiter in global scope, so it can be re-used across tests rate_limiter = InMemoryRateLimiter( requests_per_second=0.5, ) - -# Not using Grok 4 since it doesn't support reasoning params (effort) or returns -# reasoning content. +MODEL_NAME = "grok-4-fast-reasoning" class TestXAIStandard(ChatModelIntegrationTests): @@ -33,48 +32,28 @@ class TestXAIStandard(ChatModelIntegrationTests): @property def chat_model_params(self) -> dict: - # TODO: bump to test new Grok once they implement other features return { - "model": "grok-3", + "model": MODEL_NAME, "rate_limiter": rate_limiter, "stream_usage": True, } - -def test_reasoning_content() -> None: - """Test reasoning content.""" - chat_model = ChatXAI( - model="grok-3-mini", - reasoning_effort="low", + @pytest.mark.xfail( + reason="Default model does not support stop sequences, using grok-3 instead" ) - response = chat_model.invoke("What is 3^3?") - assert response.content - assert response.additional_kwargs["reasoning_content"] + @override + def test_stop_sequence(self, model: BaseChatModel) -> None: + """Override to use `grok-3` which supports stop sequences.""" + params = {**self.chat_model_params, "model": "grok-3"} - # Test streaming - full: BaseMessageChunk | None = None - for chunk in chat_model.stream("What is 3^3?"): - full = chunk if full is None else full + chunk - assert isinstance(full, AIMessageChunk) - assert full.additional_kwargs["reasoning_content"] + grok3_model = ChatXAI(**params) + result = grok3_model.invoke("hi", stop=["you"]) + assert isinstance(result, AIMessage) -def test_web_search() -> None: - llm = ChatXAI( - model="grok-3", - search_parameters={"mode": "auto", "max_search_results": 3}, - ) - - # Test invoke - response = llm.invoke("Provide me a digest of world news in the last 24 hours.") - assert response.content - assert response.additional_kwargs["citations"] - assert len(response.additional_kwargs["citations"]) <= 3 - - # Test streaming - full = None - for chunk in llm.stream("Provide me a digest of world news in the last 24 hours."): - full = chunk if full is None else full + chunk - assert isinstance(full, AIMessageChunk) - assert full.additional_kwargs["citations"] - assert len(full.additional_kwargs["citations"]) <= 3 + custom_model = ChatXAI( + **params, + stop_sequences=["you"], + ) + result = custom_model.invoke("hi") + assert isinstance(result, AIMessage) diff --git a/libs/partners/xai/tests/unit_tests/__snapshots__/test_chat_models_standard.ambr b/libs/partners/xai/tests/unit_tests/__snapshots__/test_chat_models_standard.ambr index 4cd1261555c..6f1cd18c2e2 100644 --- a/libs/partners/xai/tests/unit_tests/__snapshots__/test_chat_models_standard.ambr +++ b/libs/partners/xai/tests/unit_tests/__snapshots__/test_chat_models_standard.ambr @@ -9,7 +9,7 @@ 'kwargs': dict({ 'max_retries': 2, 'max_tokens': 100, - 'model_name': 'grok-beta', + 'model_name': 'grok-4', 'request_timeout': 60.0, 'stop': list([ ]), diff --git a/libs/partners/xai/tests/unit_tests/test_chat_models_standard.py b/libs/partners/xai/tests/unit_tests/test_chat_models_standard.py index 7c9a947a913..7224655db79 100644 --- a/libs/partners/xai/tests/unit_tests/test_chat_models_standard.py +++ b/libs/partners/xai/tests/unit_tests/test_chat_models_standard.py @@ -7,6 +7,8 @@ from langchain_tests.unit_tests import ( # type: ignore[import-not-found] from langchain_xai import ChatXAI +MODEL_NAME = "grok-4" + class 
TestXAIStandard(ChatModelUnitTests): @property @@ -15,7 +17,7 @@ class TestXAIStandard(ChatModelUnitTests): @property def chat_model_params(self) -> dict: - return {"model": "grok-beta"} + return {"model": MODEL_NAME} @property def init_from_env_params(self) -> tuple[dict, dict, dict]: @@ -24,7 +26,7 @@ class TestXAIStandard(ChatModelUnitTests): "XAI_API_KEY": "api_key", }, { - "model": "grok-beta", + "model": MODEL_NAME, }, { "xai_api_key": "api_key", diff --git a/libs/partners/xai/uv.lock b/libs/partners/xai/uv.lock index ece0030930e..858102927c0 100644 --- a/libs/partners/xai/uv.lock +++ b/libs/partners/xai/uv.lock @@ -357,7 +357,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ @@ -621,7 +621,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a6" +version = "1.0.0" source = { editable = "../../core" } dependencies = [ { name = "jsonpatch" }, @@ -679,7 +679,7 @@ typing = [ [[package]] name = "langchain-openai" -version = "1.0.0a3" +version = "1.0.0" source = { editable = "../openai" } dependencies = [ { name = "langchain-core" }, @@ -699,8 +699,10 @@ dev = [{ name = "langchain-core", editable = "../../core" }] lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }] test = [ { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, + { name = "langchain", editable = "../../langchain_v1" }, { name = "langchain-core", editable = "../../core" }, { name = "langchain-tests", editable = "../../standard-tests" }, + { name = "langgraph-prebuilt", specifier = ">=0.7.0rc1" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, { name = "pytest", specifier = ">=7.3.0,<8.0.0" }, @@ -718,7 +720,7 @@ test-integration = [ { name = "httpx", specifier = ">=0.27.0,<1.0.0" }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, - { name = "pillow", specifier = ">=10.3.0,<11.0.0" }, + { name = "pillow", specifier = ">=10.3.0,<12.0.0" }, ] typing = [ { name = "langchain-core", editable = "../../core" }, @@ -728,7 +730,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.0" source = { editable = "../../standard-tests" } dependencies = [ { name = "httpx" }, @@ -773,7 +775,7 @@ typing = [ [[package]] name = "langchain-xai" -version = "1.0.0a1" +version = "1.0.0" source = { editable = "." 
} dependencies = [ { name = "aiohttp" }, @@ -1420,7 +1422,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1428,96 +1430,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = 
"sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { 
url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 
2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time = "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = "sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { 
url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + { url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = 
"sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = 
"2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] @@ -1934,38 +1963,63 @@ wheels = [ [[package]] name = "tiktoken" -version = "0.11.0" +version = "0.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hash = "sha256:3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a", size = 37648, upload-time = "2025-08-08T23:58:08.495Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917", size = 1059937, upload-time = "2025-08-08T23:57:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0", size = 999230, upload-time = "2025-08-08T23:57:30.241Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc", size = 1130076, upload-time = "2025-08-08T23:57:31.706Z" }, - { url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882", size = 1183942, upload-time = "2025-08-08T23:57:33.142Z" }, - 
{ url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c", size = 1244705, upload-time = "2025-08-08T23:57:34.594Z" }, - { url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1", size = 884152, upload-time = "2025-08-08T23:57:36.18Z" }, - { url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf", size = 1059743, upload-time = "2025-08-08T23:57:37.516Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b", size = 999334, upload-time = "2025-08-08T23:57:38.595Z" }, - { url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458", size = 1129402, upload-time = "2025-08-08T23:57:39.627Z" }, - { url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c", size = 1184046, upload-time = "2025-08-08T23:57:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013", size = 1244691, upload-time = "2025-08-08T23:57:42.251Z" }, - { url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2", size = 884392, upload-time = "2025-08-08T23:57:43.628Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d", size = 1055199, upload-time = "2025-08-08T23:57:45.076Z" }, - { url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b", size = 996655, upload-time = "2025-08-08T23:57:46.304Z" }, - { url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8", size = 1128867, upload-time = "2025-08-08T23:57:47.438Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd", size = 1183308, upload-time = "2025-08-08T23:57:48.566Z" }, - { url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e", size = 1244301, upload-time = "2025-08-08T23:57:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f", size = 884282, upload-time = "2025-08-08T23:57:50.759Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2", size = 1055339, upload-time = "2025-08-08T23:57:51.802Z" }, - { url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8", size = 997080, upload-time = "2025-08-08T23:57:53.442Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4", size = 1128501, upload-time = "2025-08-08T23:57:54.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318", size = 1182743, upload-time = "2025-08-08T23:57:56.307Z" }, - { url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8", size = 1244057, upload-time = "2025-08-08T23:57:57.628Z" }, - { url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c", size = 883901, upload-time = "2025-08-08T23:57:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, + { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, + { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, ] [[package]] diff --git a/libs/standard-tests/README.md b/libs/standard-tests/README.md index b30f9fdbf2c..0f8b79eae76 100644 --- a/libs/standard-tests/README.md +++ b/libs/standard-tests/README.md @@ -1,32 +1,43 @@ -# langchain-tests +# πŸ¦œοΈπŸ”— langchain-tests -This is a testing library for LangChain integrations. It contains the base classes for -a standard set of tests. +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-tests?label=%20)](https://pypi.org/project/langchain-tests/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-tests)](https://opensource.org/licenses/MIT) +[![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-tests)](https://pypistats.org/packages/langchain-tests) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) -## Installation +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). -We encourage pinning your version to a specific version in order to avoid breaking -your CI when we publish new tests. We recommend upgrading to the latest version -periodically to make sure you have the latest tests. - -Not pinning your version will ensure you always have the latest tests, but it may -also break your CI if we introduce tests that your integration doesn't pass. - -Pip: +## Quick Install ```bash -pip install -U langchain-tests +pip install langchain-tests ``` -uv: +## πŸ€” What is this? -```bash -uv add langchain-tests -``` +This is a testing library for LangChain integrations. It contains the base classes for a standard set of tests. + +## πŸ“– Documentation + +For full documentation, see the [API reference](https://reference.langchain.com/python/langchain_tests/). + +## πŸ“• Releases & Versioning + +See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning](https://docs.langchain.com/oss/python/versioning) policies. + +We encourage pinning your version to a specific version in order to avoid breaking your CI when we publish new tests. We recommend upgrading to the latest version periodically to make sure you have the latest tests. + +Not pinning your version will ensure you always have the latest tests, but it may also break your CI if we introduce tests that your integration doesn't pass. + +## πŸ’ Contributing + +As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. 
+ +For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview). ## Usage -To add standard tests to an integration package (e.g., for a ChatModel), you need to create +To add standard tests to an integration package (e.g., for a chat model), you need to create 1. A unit test class that inherits from `ChatModelUnitTests` 2. An integration test class that inherits from `ChatModelIntegrationTests` diff --git a/libs/standard-tests/langchain_tests/__init__.py b/libs/standard-tests/langchain_tests/__init__.py index d2b756dde09..0d06a1c01ba 100644 --- a/libs/standard-tests/langchain_tests/__init__.py +++ b/libs/standard-tests/langchain_tests/__init__.py @@ -1,6 +1,5 @@ -"""Base Test classes for standard testing. +"""Base test classes for standard testing. -To learn how to use these classes, see the -[integration standard testing](https://python.langchain.com/docs/contributing/how_to/integrations/standard_tests/) -guide. +To learn how to use these, see the guide on +[integrating standard tests](https://docs.langchain.com/oss/python/contributing/standard-tests-langchain). """ diff --git a/libs/standard-tests/langchain_tests/integration_tests/base_store.py b/libs/standard-tests/langchain_tests/integration_tests/base_store.py index f4b64c59c7a..391433925e5 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/base_store.py +++ b/libs/standard-tests/langchain_tests/integration_tests/base_store.py @@ -1,6 +1,7 @@ -"""Standard tests for the BaseStore abstraction. +"""Standard tests for the `BaseStore` abstraction. -We don't recommend implementing externally managed BaseStore abstractions at this time. +We don't recommend implementing externally managed `BaseStore` abstractions at this +time. """ from abc import abstractmethod @@ -16,9 +17,9 @@ V = TypeVar("V") class BaseStoreSyncTests(BaseStandardTests, Generic[V]): - """Test suite for checking the key-value API of a BaseStore. + """Test suite for checking the key-value API of a `BaseStore`. - This test suite verifies the basic key-value API of a BaseStore. + This test suite verifies the basic key-value API of a `BaseStore`. The test suite is designed for synchronous key-value stores. @@ -162,9 +163,9 @@ class BaseStoreSyncTests(BaseStandardTests, Generic[V]): class BaseStoreAsyncTests(BaseStandardTests, Generic[V]): - """Test suite for checking the key-value API of a BaseStore. + """Test suite for checking the key-value API of a `BaseStore`. - This test suite verifies the basic key-value API of a BaseStore. + This test suite verifies the basic key-value API of a `BaseStore`. The test suite is designed for synchronous key-value stores. diff --git a/libs/standard-tests/langchain_tests/integration_tests/cache.py b/libs/standard-tests/langchain_tests/integration_tests/cache.py index ea2b6a3ff60..24655082f3c 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/cache.py +++ b/libs/standard-tests/langchain_tests/integration_tests/cache.py @@ -1,6 +1,7 @@ -"""Standard tests for the BaseCache abstraction. +"""Standard tests for the `BaseCache` abstraction. -We don't recommend implementing externally managed BaseCache abstractions at this time. +We don't recommend implementing externally managed `BaseCache` abstractions at this +time. """ from abc import abstractmethod @@ -13,7 +14,7 @@ from langchain_tests.base import BaseStandardTests class SyncCacheTestSuite(BaseStandardTests): - """Test suite for checking the BaseCache API of a caching layer for LLMs. 
+ """Test suite for checking the `BaseCache` API of a caching layer for LLMs. This test suite verifies the basic caching API of a caching layer for LLMs. @@ -40,7 +41,7 @@ class SyncCacheTestSuite(BaseStandardTests): return "Sample LLM string configuration." def get_sample_generation(self) -> Generation: - """Return a sample Generation object for testing.""" + """Return a sample `Generation` object for testing.""" return Generation( text="Sample generated text.", generation_info={"reason": "test"}, @@ -65,8 +66,8 @@ class SyncCacheTestSuite(BaseStandardTests): This test should follow a test that updates the cache. - This just verifies that the fixture is set up properly to be empty - after each test. + This just verifies that the fixture is set up properly to be empty after each + test. """ assert ( cache.lookup(self.get_sample_prompt(), self.get_sample_llm_string()) is None @@ -94,7 +95,7 @@ class SyncCacheTestSuite(BaseStandardTests): assert cache.lookup(prompt, llm_string) == [generation] def test_update_cache_with_multiple_generations(self, cache: BaseCache) -> None: - """Test updating the cache with multiple Generation objects.""" + """Test updating the cache with multiple `Generation` objects.""" prompt = self.get_sample_prompt() llm_string = self.get_sample_llm_string() generations = [ @@ -106,14 +107,14 @@ class SyncCacheTestSuite(BaseStandardTests): class AsyncCacheTestSuite(BaseStandardTests): - """Test suite for checking the BaseCache API of a caching layer for LLMs. + """Test suite for checking the `BaseCache` API of a caching layer for LLMs. - This test suite verifies the basic caching API of a caching layer for LLMs. + Verifies the basic caching API of a caching layer for LLMs. The test suite is designed for synchronous caching layers. - Implementers should subclass this test suite and provide a fixture - that returns an empty cache for each test. + Implementers should subclass this test suite and provide a fixture that returns an + empty cache for each test. """ @abstractmethod @@ -133,7 +134,7 @@ class AsyncCacheTestSuite(BaseStandardTests): return "Sample LLM string configuration." def get_sample_generation(self) -> Generation: - """Return a sample Generation object for testing.""" + """Return a sample `Generation` object for testing.""" return Generation( text="Sample generated text.", generation_info={"reason": "test"}, @@ -159,8 +160,8 @@ class AsyncCacheTestSuite(BaseStandardTests): This test should follow a test that updates the cache. - This just verifies that the fixture is set up properly to be empty - after each test. + This just verifies that the fixture is set up properly to be empty after each + test. 
""" assert ( await cache.alookup(self.get_sample_prompt(), self.get_sample_llm_string()) @@ -195,7 +196,7 @@ class AsyncCacheTestSuite(BaseStandardTests): self, cache: BaseCache, ) -> None: - """Test updating the cache with multiple Generation objects.""" + """Test updating the cache with multiple `Generation` objects.""" prompt = self.get_sample_prompt() llm_string = self.get_sample_llm_string() generations = [ diff --git a/libs/standard-tests/langchain_tests/integration_tests/chat_models.py b/libs/standard-tests/langchain_tests/integration_tests/chat_models.py index e20085e1306..93e55949a02 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/chat_models.py +++ b/libs/standard-tests/langchain_tests/integration_tests/chat_models.py @@ -4,6 +4,8 @@ from __future__ import annotations import base64 import json +import os +import warnings from typing import Annotated, Any, Literal from unittest.mock import MagicMock @@ -113,6 +115,15 @@ def _validate_tool_call_message(message: BaseMessage) -> None: assert tool_call["id"] is not None assert tool_call.get("type") == "tool_call" + content_tool_calls = [ + block for block in message.content_blocks if block["type"] == "tool_call" + ] + assert len(content_tool_calls) == 1 + content_tool_call = content_tool_calls[0] + assert content_tool_call["name"] == "magic_function" + assert content_tool_call["args"] == {"input": 3} + assert content_tool_call["id"] is not None + def _validate_tool_call_message_no_args(message: BaseMessage) -> None: assert isinstance(message, AIMessage) @@ -125,6 +136,21 @@ def _validate_tool_call_message_no_args(message: BaseMessage) -> None: assert tool_call.get("type") == "tool_call" +def _get_base64_from_url(url: str) -> str: + user_agent = os.environ.get("LANGCHAIN_TESTS_USER_AGENT") + if not user_agent: + warning_message = ( + "LANGCHAIN_TESTS_USER_AGENT environment variable not set. " + "langchain-tests pulls (CC0 License) audio data from wikimedia.org. " + "Consider setting a user agent to identify your requests. See " + "https://foundation.wikimedia.org/wiki/Policy:Wikimedia_Foundation_User-Agent_Policy" + ) + warnings.warn(warning_message, stacklevel=2) + headers = {"User-Agent": user_agent} if user_agent else {} + httpx_response = httpx.get(url, headers=headers).content + return base64.b64encode(httpx_response).decode("utf-8") + + @tool def unicode_customer(customer_name: str, description: str) -> str: """Tool for creating a customer with Unicode name. @@ -147,7 +173,6 @@ class ChatModelIntegrationTests(ChatModelTests): `chat_model_params` properties to specify what model to test and its initialization parameters. - ```python from typing import Type @@ -173,44 +198,41 @@ class ChatModelIntegrationTests(ChatModelTests): Test subclasses **must** implement the following two properties: - chat_model_class - The chat model class to test, e.g., `ChatParrotLink`. + `chat_model_class`: The chat model class to test, e.g., `ChatParrotLink`. - ```python - @property - def chat_model_class(self) -> Type[ChatParrotLink]: - return ChatParrotLink - ``` + ```python + @property + def chat_model_class(self) -> Type[ChatParrotLink]: + return ChatParrotLink + ``` - chat_model_params - Initialization parameters for the chat model. + `chat_model_params`: Initialization parameters for the chat model. 
- ```python - @property - def chat_model_params(self) -> dict: - return {"model": "bird-brain-001", "temperature": 0} - ``` + ```python + @property + def chat_model_params(self) -> dict: + return {"model": "bird-brain-001", "temperature": 0} + ``` In addition, test subclasses can control what features are tested (such as tool calling or multi-modality) by selectively overriding the following properties. + Expand to see details: - ??? note "`has_tool_calling`" + ??? info "`has_tool_calling`" Boolean property indicating whether the chat model supports tool calling. By default, this is determined by whether the chat model's `bind_tools` method is overridden. It typically does not need to be overridden on the test class. - Example override: - - ```python + ```python "Example override" @property def has_tool_calling(self) -> bool: return True ``` - ??? note "`tool_choice_value`" + ??? info "`tool_choice_value`" Value to use for tool choice when used in tests. @@ -228,7 +250,7 @@ class ChatModelIntegrationTests(ChatModelTests): return "any" ``` - ??? note "`has_tool_choice`" + ??? info "`has_tool_choice`" Boolean property indicating whether the chat model supports forcing tool calling via a `tool_choice` parameter. @@ -240,15 +262,13 @@ class ChatModelIntegrationTests(ChatModelTests): `tool_choice="any"` will force a tool call, and `tool_choice=` will force a call to a specific tool. - Example override: - - ```python + ```python "Example override" @property def has_tool_choice(self) -> bool: return False ``` - ??? note "`has_structured_output`" + ??? info "`has_structured_output`" Boolean property indicating whether the chat model supports structured output. @@ -257,7 +277,7 @@ class ChatModelIntegrationTests(ChatModelTests): `with_structured_output` method is overridden. If the base implementation is intended to be used, this method should be overridden. - See: https://python.langchain.com/docs/concepts/structured_outputs/ + See: https://docs.langchain.com/oss/python/langchain/structured-output ```python @property @@ -265,10 +285,12 @@ class ChatModelIntegrationTests(ChatModelTests): return True ``` - ??? note "`structured_output_kwargs`" + ??? info "`structured_output_kwargs`" Dict property that can be used to specify additional kwargs for - `with_structured_output`. Useful for testing different models. + `with_structured_output`. + + Useful for testing different models. ```python @property @@ -276,12 +298,12 @@ class ChatModelIntegrationTests(ChatModelTests): return {"method": "function_calling"} ``` - ??? note "`supports_json_mode`" + ??? info "`supports_json_mode`" Boolean property indicating whether the chat model supports JSON mode in `with_structured_output`. - See: https://python.langchain.com/docs/concepts/structured_outputs/#json-mode + See: https://docs.langchain.com/oss/python/langchain/structured-output ```python @property @@ -289,9 +311,10 @@ class ChatModelIntegrationTests(ChatModelTests): return True ``` - ??? note "`supports_image_inputs`" + ??? info "`supports_image_inputs`" Boolean property indicating whether the chat model supports image inputs. + Defaults to `False`. 
If set to `True`, the chat model will be tested by inputting an @@ -301,7 +324,7 @@ class ChatModelIntegrationTests(ChatModelTests): { "type": "image", "base64": "", - "mime_type": "image/jpeg", # or appropriate mime-type + "mime_type": "image/jpeg", # or appropriate MIME type } ``` @@ -314,7 +337,7 @@ class ChatModelIntegrationTests(ChatModelTests): } ``` - See https://python.langchain.com/docs/concepts/multimodality/ + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -322,10 +345,12 @@ class ChatModelIntegrationTests(ChatModelTests): return True ``` - ??? note "`supports_image_urls`" + ??? info "`supports_image_urls`" Boolean property indicating whether the chat model supports image inputs from - URLs. Defaults to `False`. + URLs. + + Defaults to `False`. If set to `True`, the chat model will be tested using content blocks of the form @@ -337,7 +362,7 @@ class ChatModelIntegrationTests(ChatModelTests): } ``` - See https://python.langchain.com/docs/concepts/multimodality/ + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -345,9 +370,10 @@ class ChatModelIntegrationTests(ChatModelTests): return True ``` - ??? note "`supports_pdf_inputs`" + ??? info "`supports_pdf_inputs`" Boolean property indicating whether the chat model supports PDF inputs. + Defaults to `False`. If set to `True`, the chat model will be tested by inputting a @@ -361,7 +387,7 @@ class ChatModelIntegrationTests(ChatModelTests): } ``` - See https://python.langchain.com/docs/concepts/multimodality/ + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -369,9 +395,10 @@ class ChatModelIntegrationTests(ChatModelTests): return True ``` - ??? note "`supports_audio_inputs`" + ??? info "`supports_audio_inputs`" Boolean property indicating whether the chat model supports audio inputs. + Defaults to `False`. If set to `True`, the chat model will be tested by inputting an @@ -381,11 +408,11 @@ class ChatModelIntegrationTests(ChatModelTests): { "type": "audio", "base64": "", - "mime_type": "audio/wav", # or appropriate mime-type + "mime_type": "audio/wav", # or appropriate MIME type } ``` - See https://python.langchain.com/docs/concepts/multimodality/ + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -393,19 +420,35 @@ class ChatModelIntegrationTests(ChatModelTests): return True ``` - ??? note "`supports_video_inputs`" + !!! warning + This test downloads audio data from wikimedia.org. You may need to set the + `LANGCHAIN_TESTS_USER_AGENT` environment variable to identify these tests, + e.g., + + ```bash + export LANGCHAIN_TESTS_USER_AGENT="CoolBot/0.0 (https://example.org/coolbot/; coolbot@example.org) generic-library/0.0" + ``` + + Refer to the [Wikimedia Foundation User-Agent Policy](https://foundation.wikimedia.org/wiki/Policy:Wikimedia_Foundation_User-Agent_Policy). + + ??? info "`supports_video_inputs`" Boolean property indicating whether the chat model supports image inputs. - Defaults to `False`. No current tests are written for this feature. - ??? note "`returns_usage_metadata`" + Defaults to `False`. + + No current tests are written for this feature. + + ??? info "`returns_usage_metadata`" Boolean property indicating whether the chat model returns usage metadata - on invoke and streaming responses. Defaults to `True`. + on invoke and streaming responses. 
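A hedged sketch of the base64 image input shape referenced by `supports_image_inputs` above; the base64 payload and the `model` instance are placeholders, not part of this patch:

```python
# Editorial sketch; `model` is any chat model whose test class sets
# supports_image_inputs = True, and `image_b64` is a base64-encoded JPEG.
from langchain_core.messages import HumanMessage

image_b64 = "..."  # placeholder base64 string

message = HumanMessage(
    content=[
        {"type": "text", "text": "Describe this image."},
        {
            "type": "image",
            "base64": image_b64,
            "mime_type": "image/jpeg",  # or the appropriate MIME type
        },
    ]
)
# response = model.invoke([message])
```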
- `usage_metadata` is an optional dict attribute on `AIMessage`s that track input - and output tokens. - [See more](https://python.langchain.com/api_reference/core/messages/langchain_core.messages.ai.UsageMetadata.html). + Defaults to `True`. + + `usage_metadata` is an optional dict attribute on `AIMessage` objects that track + input and output tokens. + [See more](https://reference.langchain.com/python/langchain_core/language_models/#langchain_core.messages.ai.UsageMetadata). ```python @property @@ -413,10 +456,10 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - Models supporting `usage_metadata` should also return the name of the - underlying model in the `response_metadata` of the `AIMessage`. + Models supporting `usage_metadata` should also return the name of the underlying + model in the `response_metadata` of the `AIMessage`. - ??? note "`supports_anthropic_inputs`" + ??? info "`supports_anthropic_inputs`" Boolean property indicating whether the chat model supports Anthropic-style inputs. @@ -444,7 +487,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "`supports_image_tool_message`" + ??? info "`supports_image_tool_message`" Boolean property indicating whether the chat model supports a `ToolMessage` that includes image content, e.g. in the OpenAI Chat Completions format: @@ -462,7 +505,7 @@ class ChatModelIntegrationTests(ChatModelTests): ) ``` - as well as the LangChain `ImageContentBlock` format: + ...as well as the LangChain `ImageContentBlock` format: ```python ToolMessage( @@ -487,7 +530,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "`supports_pdf_tool_message`" + ??? info "`supports_pdf_tool_message`" Boolean property indicating whether the chat model supports a `ToolMessage that include PDF content using the LangChain `FileContentBlock` format: @@ -515,14 +558,14 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "`supported_usage_metadata_details`" + ??? info "`supported_usage_metadata_details`" Property controlling what usage metadata details are emitted in both invoke and stream. - `usage_metadata` is an optional dict attribute on `AIMessage`s that track input - and output tokens. - [See more](https://python.langchain.com/api_reference/core/messages/langchain_core.messages.ai.UsageMetadata.html). + `usage_metadata` is an optional dict attribute on `AIMessage` objects that track + input and output tokens. + [See more](https://reference.langchain.com/python/langchain_core/language_models/#langchain_core.messages.ai.UsageMetadata). It includes optional keys `input_token_details` and `output_token_details` that can track usage details associated with special types of tokens, such as @@ -530,7 +573,7 @@ class ChatModelIntegrationTests(ChatModelTests): Only needs to be overridden if these details are supplied. - ??? note "`enable_vcr_tests`" + ??? info "`enable_vcr_tests`" Property controlling whether to enable select tests that rely on [VCR](https://vcrpy.readthedocs.io/en/latest/) caching of HTTP calls, such @@ -563,9 +606,7 @@ class ChatModelIntegrationTests(ChatModelTests): also exclude additional headers, override the default exclusions, or apply other customizations to the VCR configuration. 
See example below: - ```python - :caption: tests/conftest.py - + ```python title="tests/conftest.py" import pytest from langchain_tests.conftest import ( _base_vcr_config as _base_vcr_config, @@ -600,9 +641,7 @@ class ChatModelIntegrationTests(ChatModelTests): to your VCR fixture and enable this serializer in the config. See example below: - ```python - :caption: tests/conftest.py - + ```python title="tests/conftest.py" import pytest from langchain_tests.conftest import ( CustomPersister, @@ -641,7 +680,6 @@ class ChatModelIntegrationTests(ChatModelTests): def pytest_recording_configure(config: dict, vcr: VCR) -> None: vcr.register_persister(CustomPersister()) vcr.register_serializer("yaml.gz", CustomSerializer()) - ``` You can inspect the contents of the compressed cassettes (e.g., to @@ -651,7 +689,7 @@ class ChatModelIntegrationTests(ChatModelTests): gunzip -k /path/to/tests/cassettes/TestClass_test.yaml.gz ``` - or by using the serializer: + ...or by using the serializer: ```python from langchain_tests.conftest import ( @@ -683,7 +721,6 @@ class ChatModelIntegrationTests(ChatModelTests): You can then commit the cassette to your repository. Subsequent test runs will use the cassette instead of making HTTP calls. - ''' # noqa: E501,D214 @property @@ -696,7 +733,7 @@ class ChatModelIntegrationTests(ChatModelTests): This should pass for all integrations. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, you should make sure your `_generate` method does not raise any exceptions, and that it returns a valid @@ -722,14 +759,14 @@ class ChatModelIntegrationTests(ChatModelTests): a "natively async" implementation, but rather that the model can be used in an async context. - ??? note "Troubleshooting" + ??? question "Troubleshooting" First, debug `langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_invoke`. because `ainvoke` has a default implementation that calls `invoke` in an async context. - If that test passes but not this one, you should make sure your _agenerate + If that test passes but not this one, you should make sure your `_agenerate` method does not raise any exceptions, and that it returns a valid `langchain_core.outputs.chat_result.ChatResult` like so: @@ -753,7 +790,7 @@ class ChatModelIntegrationTests(ChatModelTests): a "streaming" implementation, but rather that the model can be used in a streaming context. - ??? note "Troubleshooting" + ??? question "Troubleshooting" First, debug `langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_invoke`. @@ -791,7 +828,7 @@ class ChatModelIntegrationTests(ChatModelTests): a "natively async" or "streaming" implementation, but rather that the model can be used in an async streaming context. - ??? note "Troubleshooting" + ??? question "Troubleshooting" First, debug `langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_stream`. @@ -830,7 +867,7 @@ class ChatModelIntegrationTests(ChatModelTests): This should pass for all integrations. Tests the model's ability to process multiple prompts in a single batch. - ??? note "Troubleshooting" + ??? question "Troubleshooting" First, debug `langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_invoke` @@ -839,7 +876,7 @@ class ChatModelIntegrationTests(ChatModelTests): If that test passes but not this one, you should make sure your `batch` method does not raise any exceptions, and that it returns a list of valid - `langchain_core.messages.AIMessage` objects. 
+ `AIMessage` objects. """ batch_results = model.batch(["Hello", "Hey"]) @@ -858,7 +895,7 @@ class ChatModelIntegrationTests(ChatModelTests): This should pass for all integrations. Tests the model's ability to process multiple prompts in a single batch asynchronously. - ??? note "Troubleshooting" + ??? question "Troubleshooting" First, debug `langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_batch` @@ -869,7 +906,7 @@ class ChatModelIntegrationTests(ChatModelTests): If those tests pass but not this one, you should make sure your `abatch` method does not raise any exceptions, and that it returns a list of valid - `langchain_core.messages.AIMessage` objects. + `AIMessage` objects. """ batch_results = await model.abatch(["Hello", "Hey"]) @@ -886,16 +923,17 @@ class ChatModelIntegrationTests(ChatModelTests): """Test to verify that the model can handle multi-turn conversations. This should pass for all integrations. Tests the model's ability to process - a sequence of alternating human and AI messages as context for generating - the next response. + a sequence of alternating `HumanMessage` and `AIMessage` objects as context for + generating the next response. - ??? note "Troubleshooting" + ??? question "Troubleshooting" First, debug `langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_invoke` because this test also uses `model.invoke`. If that test passes but not this one, you should verify that: + 1. Your model correctly processes the message history 2. The model maintains appropriate context from previous messages 3. The response is a valid `langchain_core.messages.AIMessage` @@ -920,7 +958,7 @@ class ChatModelIntegrationTests(ChatModelTests): a sequence of double-system, double-human, and double-ai messages as context for generating the next response. - ??? note "Troubleshooting" + ??? question "Troubleshooting" First, debug `langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_invoke` @@ -931,8 +969,9 @@ class ChatModelIntegrationTests(ChatModelTests): because this test is the "basic case" without double messages. If that test passes those but not this one, you should verify that: + 1. Your model API can handle double messages, or the integration should - merge messages before sending them to the API. + merge messages before sending them to the API. 2. The response is a valid `langchain_core.messages.AIMessage` """ @@ -956,9 +995,9 @@ class ChatModelIntegrationTests(ChatModelTests): """Test to verify that the model returns correct usage metadata. This test is optional and should be skipped if the model does not return - usage metadata (see Configuration below). + usage metadata (see configuration below). - !!! warning "Behavior changed in 0.3.17" + !!! warning "Behavior changed in `langchain-tests` 0.3.17" Additionally check for the presence of `model_name` in the response metadata, which is needed for usage tracking in callback handlers. @@ -977,9 +1016,10 @@ class ChatModelIntegrationTests(ChatModelTests): ``` This test can also check the format of specific kinds of usage metadata - based on the `supported_usage_metadata_details` property. This property - should be configured as follows with the types of tokens that the model - supports tracking: + based on the `supported_usage_metadata_details` property. 
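A minimal sketch of the return value the usage-metadata test above looks for, with placeholder token counts and model name; the exact values a provider reports will differ:

```python
# Editorial sketch of a ChatResult carrying usage metadata; values are placeholders.
from langchain_core.messages import AIMessage
from langchain_core.outputs import ChatGeneration, ChatResult

result = ChatResult(
    generations=[
        ChatGeneration(
            message=AIMessage(
                content="Hello!",
                usage_metadata={
                    "input_tokens": 10,
                    "output_tokens": 5,
                    "total_tokens": 15,
                },
                # `model_name` is needed for usage tracking in callback handlers.
                response_metadata={"model_name": "bird-brain-001"},
            )
        )
    ]
)
```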
+ + This property should be configured as follows with the types of tokens that + the model supports tracking: ```python class TestMyChatModelIntegration(ChatModelIntegrationTests): @@ -1003,11 +1043,11 @@ class ChatModelIntegrationTests(ChatModelTests): } ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, first verify that your model returns `langchain_core.messages.ai.UsageMetadata` dicts - attached to the returned AIMessage object in `_generate`: + attached to the returned `AIMessage` object in `_generate`: ```python return ChatResult( @@ -1131,7 +1171,7 @@ class ChatModelIntegrationTests(ChatModelTests): Test to verify that the model returns correct usage metadata in streaming mode. - !!! warning "Behavior changed in 0.3.17" + !!! warning "Behavior changed in `langchain-tests` 0.3.17" Additionally check for the presence of `model_name` in the response metadata, which is needed for usage tracking in callback handlers. @@ -1149,9 +1189,10 @@ class ChatModelIntegrationTests(ChatModelTests): ``` This test can also check the format of specific kinds of usage metadata - based on the `supported_usage_metadata_details` property. This property - should be configured as follows with the types of tokens that the model - supports tracking: + based on the `supported_usage_metadata_details` property. + + This property should be configured as follows with the types of tokens that + the model supports tracking: ```python class TestMyChatModelIntegration(ChatModelIntegrationTests): @@ -1175,11 +1216,11 @@ class ChatModelIntegrationTests(ChatModelTests): } ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, first verify that your model yields `langchain_core.messages.ai.UsageMetadata` dicts - attached to the returned AIMessage object in `_stream` + attached to the returned `AIMessage` object in `_stream` that sum up to the total usage metadata. Note that `input_tokens` should only be included on one of the chunks @@ -1297,7 +1338,7 @@ class ChatModelIntegrationTests(ChatModelTests): This should pass for all integrations. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the function signature for `_generate` (as well as `_stream` and async variants) accepts the `stop` parameter: @@ -1325,6 +1366,7 @@ class ChatModelIntegrationTests(ChatModelTests): result = custom_model.invoke("hi") assert isinstance(result, AIMessage) + @pytest.mark.parametrize("model", [{}, {"output_version": "v1"}], indirect=True) def test_tool_calling(self, model: BaseChatModel) -> None: """Test that the model generates tool calls. @@ -1332,11 +1374,11 @@ class ChatModelIntegrationTests(ChatModelTests): set to `False`. This test is optional and should be skipped if the model does not support - tool calling (see Configuration below). + tool calling (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1346,7 +1388,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that `bind_tools` is implemented to correctly translate LangChain tool objects into the appropriate schema for your @@ -1394,11 +1436,11 @@ class ChatModelIntegrationTests(ChatModelTests): set to `False`. 
This test is optional and should be skipped if the model does not support - tool calling (see Configuration below). + tool calling (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1408,7 +1450,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that `bind_tools` is implemented to correctly translate LangChain tool objects into the appropriate schema for your @@ -1457,11 +1499,11 @@ class ChatModelIntegrationTests(ChatModelTests): on the test class is set to `False`. This test is optional and should be skipped if the model does not support - tool calling (see Configuration below). + tool calling (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1471,7 +1513,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that `bind_tools` is implemented to correctly translate LangChain tool objects into the appropriate schema for your @@ -1527,11 +1569,11 @@ class ChatModelIntegrationTests(ChatModelTests): with messages generated from providers following OpenAI format. This test should be skipped if the model does not support tool calling - (see Configuration below). + (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1541,7 +1583,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that: @@ -1616,11 +1658,11 @@ class ChatModelIntegrationTests(ChatModelTests): ``` This test should be skipped if the model does not support tool calling - (see Configuration below). + (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1630,7 +1672,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that: @@ -1696,11 +1738,11 @@ class ChatModelIntegrationTests(ChatModelTests): test class is set to `False`. This test is optional and should be skipped if the model does not support - tool calling (see Configuration below). + tool calling (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_choice` to False in your + To disable tool calling tests, set `has_tool_choice` to `False` in your test class: ```python @@ -1710,7 +1752,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check whether the `test_tool_calling` test is passing. If it is not, refer to the troubleshooting steps in that test first. @@ -1748,11 +1790,11 @@ class ChatModelIntegrationTests(ChatModelTests): is set to `False`. 
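A sketch of the call pattern the tool-calling tests above exercise, using the `magic_function` tool they reference; the commented `model` lines are illustrative placeholders rather than part of the patch:

```python
# Editorial sketch of the pattern the tool-calling tests exercise.
from langchain_core.tools import tool


@tool
def magic_function(input: int) -> int:
    """Apply a magic function to an input."""
    return input + 2


# `model` would be an instance of the chat model under test:
# model_with_tools = model.bind_tools([magic_function], tool_choice="any")
# ai_msg = model_with_tools.invoke("What is the value of magic_function(3)?")
# ai_msg.tool_calls is expected to contain a tool call named "magic_function"
# with args {"input": 3}, a non-null "id", and type "tool_call".
```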
This test is optional and should be skipped if the model does not support - tool calling (see Configuration below). + tool calling (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1762,7 +1804,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that `bind_tools` is implemented to correctly translate LangChain tool objects into the appropriate schema for your @@ -1821,11 +1863,11 @@ class ChatModelIntegrationTests(ChatModelTests): to the model. This test is optional and should be skipped if the model does not support - tool calling (see Configuration below). + tool calling (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1835,7 +1877,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the `status` field on `ToolMessage` objects is either ignored or passed to the model appropriately. @@ -1882,11 +1924,11 @@ class ChatModelIntegrationTests(ChatModelTests): - `HumanMessage` with string content (a follow-up question). This test should be skipped if the model does not support tool calling - (see Configuration below). + (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -1896,7 +1938,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" This test uses [a utility function](https://python.langchain.com/api_reference/core/utils/langchain_core.utils.function_calling.tool_example_to_messages.html). in `langchain_core` to generate a sequence of messages representing @@ -1943,7 +1985,7 @@ class ChatModelIntegrationTests(ChatModelTests): """Test to verify structured output is generated both on invoke and stream. This test is optional and should be skipped if the model does not support - structured output (see Configuration below). + structured output (see configuration below). ??? note "Configuration" @@ -1960,7 +2002,7 @@ class ChatModelIntegrationTests(ChatModelTests): By default, `has_structured_output` is True if a model overrides the `with_structured_output` or `bind_tools` methods. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that the model's `bind_tools` method properly handles both JSON Schema and Pydantic V2 models. @@ -2026,7 +2068,7 @@ class ChatModelIntegrationTests(ChatModelTests): """Test to verify structured output is generated both on invoke and stream. This test is optional and should be skipped if the model does not support - structured output (see Configuration below). + structured output (see configuration below). ??? note "Configuration" @@ -2043,7 +2085,7 @@ class ChatModelIntegrationTests(ChatModelTests): By default, `has_structured_output` is True if a model overrides the `with_structured_output` or `bind_tools` methods. - ??? note "Troubleshooting" + ??? 
question "Troubleshooting" If this test fails, ensure that the model's `bind_tools` method properly handles both JSON Schema and Pydantic V2 models. @@ -2108,7 +2150,7 @@ class ChatModelIntegrationTests(ChatModelTests): `pydantic.v1.BaseModel` is available in the Pydantic 2 package. This test is optional and should be skipped if the model does not support - structured output (see Configuration below). + structured output (see configuration below). ??? note "Configuration" @@ -2125,7 +2167,7 @@ class ChatModelIntegrationTests(ChatModelTests): By default, `has_structured_output` is True if a model overrides the `with_structured_output` or `bind_tools` methods. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that the model's `bind_tools` method properly handles both JSON Schema and Pydantic V1 models. @@ -2174,7 +2216,7 @@ class ChatModelIntegrationTests(ChatModelTests): parameters. This test is optional and should be skipped if the model does not support - structured output (see Configuration below). + structured output (see configuration below). ??? note "Configuration" @@ -2191,7 +2233,7 @@ class ChatModelIntegrationTests(ChatModelTests): By default, `has_structured_output` is True if a model overrides the `with_structured_output` or `bind_tools` methods. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that the model's `bind_tools` method properly handles Pydantic V2 models with optional parameters. @@ -2246,11 +2288,11 @@ class ChatModelIntegrationTests(ChatModelTests): """Test structured output via [JSON mode.](https://python.langchain.com/docs/concepts/structured_outputs/#json-mode). This test is optional and should be skipped if the model does not support - the JSON mode feature (see Configuration below). + the JSON mode feature (see configuration below). ??? note "Configuration" - To disable this test, set `supports_json_mode` to False in your + To disable this test, set `supports_json_mode` to `False` in your test class: ```python @@ -2260,7 +2302,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" See example implementation of `with_structured_output` here: https://python.langchain.com/api_reference/_modules/langchain_openai/chat_models/base.html#BaseChatOpenAI.with_structured_output @@ -2305,7 +2347,7 @@ class ChatModelIntegrationTests(ChatModelTests): def test_pdf_inputs(self, model: BaseChatModel) -> None: """Test that the model can process PDF inputs. - This test should be skipped (see Configuration below) if the model does not + This test should be skipped (see configuration below) if the model does not support PDF inputs. These will take the shape of the LangChain `FileContentBlock`: @@ -2334,7 +2376,7 @@ class ChatModelIntegrationTests(ChatModelTests): ??? note "Configuration" - To disable this test, set `supports_pdf_inputs` to False in your + To disable this test, set `supports_pdf_inputs` to `False` in your test class: ```python @@ -2344,11 +2386,11 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the model can correctly handle messages with pdf content blocks, including base64-encoded files. Otherwise, set - the `supports_pdf_inputs` property to False. + the `supports_pdf_inputs` property to `False`. 
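For context on the structured-output tests above, the calls they exercise look roughly like this; a sketch only, where `model` is the chat model under test and `Joke` is an arbitrary example schema:

```python
from langchain_core.language_models import BaseChatModel
from pydantic import BaseModel, Field


class Joke(BaseModel):
    """Joke to tell the user."""

    setup: str = Field(description="The setup of the joke")
    punchline: str = Field(description="The punchline of the joke")


def structured_output_demo(model: BaseChatModel) -> None:
    # Pydantic schema: the output is validated into a Joke instance.
    structured = model.with_structured_output(Joke)
    joke = structured.invoke("Tell me a joke about cats")
    assert isinstance(joke, Joke)

    # JSON Schema (dict) input: the output is a plain dict instead.
    structured_dict = model.with_structured_output(Joke.model_json_schema())
    assert isinstance(structured_dict.invoke("Tell me a joke about cats"), dict)
```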
""" if not self.supports_pdf_inputs: @@ -2393,7 +2435,7 @@ class ChatModelIntegrationTests(ChatModelTests): def test_audio_inputs(self, model: BaseChatModel) -> None: """Test that the model can process audio inputs. - This test should be skipped (see Configuration below) if the model does not + This test should be skipped (see configuration below) if the model does not support audio inputs. These will take the shape of the LangChain `AudioContentBlock`: @@ -2401,7 +2443,7 @@ class ChatModelIntegrationTests(ChatModelTests): { "type": "audio", "base64": "", - "mime_type": "audio/wav", # or appropriate mime-type + "mime_type": "audio/wav", # or appropriate MIME type } ``` @@ -2418,9 +2460,19 @@ class ChatModelIntegrationTests(ChatModelTests): } ``` + Note: this test downloads audio data from wikimedia.org. You may need to set + the `LANGCHAIN_TESTS_USER_AGENT` environment variable to identify these + requests, e.g., + + ```bash + export LANGCHAIN_TESTS_USER_AGENT="CoolBot/0.0 (https://example.org/coolbot/; coolbot@example.org) generic-library/0.0" + ``` + + Refer to the [Wikimedia Foundation User-Agent Policy](https://foundation.wikimedia.org/wiki/Policy:Wikimedia_Foundation_User-Agent_Policy). + ??? note "Configuration" - To disable this test, set `supports_audio_inputs` to False in your + To disable this test, set `supports_audio_inputs` to `False` in your test class: ```python @@ -2430,18 +2482,20 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the model can correctly handle messages with audio content blocks, specifically base64-encoded files. Otherwise, - set the `supports_audio_inputs` property to False. + set the `supports_audio_inputs` property to `False`. - """ + """ # noqa: E501 if not self.supports_audio_inputs: pytest.skip("Model does not support audio inputs.") - url = "https://upload.wikimedia.org/wikipedia/commons/3/3d/Alcal%C3%A1_de_Henares_%28RPS_13-04-2024%29_canto_de_ruise%C3%B1or_%28Luscinia_megarhynchos%29_en_el_Soto_del_Henares.wav" - audio_data = base64.b64encode(httpx.get(url).content).decode("utf-8") + # https://commons.wikimedia.org/wiki/File:Northern_Flicker_202280456.wav + # License: CC0 1.0 Universal + url = "https://upload.wikimedia.org/wikipedia/commons/6/6a/Northern_Flicker_202280456.wav" + audio_data = _get_base64_from_url(url) message = HumanMessage( [ @@ -2476,7 +2530,7 @@ class ChatModelIntegrationTests(ChatModelTests): def test_image_inputs(self, model: BaseChatModel) -> None: """Test that the model can process image inputs. - This test should be skipped (see Configuration below) if the model does not + This test should be skipped (see configuration below) if the model does not support image inputs. These will take the shape of the LangChain `ImageContentBlock`: @@ -2484,7 +2538,7 @@ class ChatModelIntegrationTests(ChatModelTests): { "type": "image", "base64": "", - "mime_type": "image/jpeg", # or appropriate mime-type + "mime_type": "image/jpeg", # or appropriate MIME type } ``` @@ -2503,7 +2557,7 @@ class ChatModelIntegrationTests(ChatModelTests): See https://python.langchain.com/docs/concepts/multimodality/ - If the property `supports_image_urls` is set to True, the test will also + If the property `supports_image_urls` is set to `True`, the test will also check that we can process content blocks of the form: ```python @@ -2515,7 +2569,7 @@ class ChatModelIntegrationTests(ChatModelTests): ??? 
note "Configuration" - To disable this test, set `supports_image_inputs` to False in your + To disable this test, set `supports_image_inputs` to `False` in your test class: ```python @@ -2530,26 +2584,26 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the model can correctly handle messages with image content blocks, including base64-encoded images. Otherwise, set - the `supports_image_inputs` property to False. + the `supports_image_inputs` property to `False`. """ if not self.supports_image_inputs: pytest.skip("Model does not support image message.") - image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + image_url = "https://raw.githubusercontent.com/langchain-ai/docs/4d11d08b6b0e210bd456943f7a22febbd168b543/src/images/agentic-rag-output.png" image_data = base64.b64encode(httpx.get(image_url).content).decode("utf-8") # OpenAI CC format, base64 data message = HumanMessage( content=[ - {"type": "text", "text": "describe the weather in this image"}, + {"type": "text", "text": "Give a concise description of this image."}, { "type": "image_url", - "image_url": {"url": f"data:image/jpeg;base64,{image_data}"}, + "image_url": {"url": f"data:image/png;base64,{image_data}"}, }, ], ) @@ -2558,11 +2612,11 @@ class ChatModelIntegrationTests(ChatModelTests): # Standard LangChain format, base64 data message = HumanMessage( content=[ - {"type": "text", "text": "describe the weather in this image"}, + {"type": "text", "text": "Give a concise description of this image."}, { "type": "image", "base64": image_data, - "mime_type": "image/jpeg", + "mime_type": "image/png", }, ], ) @@ -2572,7 +2626,10 @@ class ChatModelIntegrationTests(ChatModelTests): if self.supports_image_urls: message = HumanMessage( content=[ - {"type": "text", "text": "describe the weather in this image"}, + { + "type": "text", + "text": "Give a concise description of this image.", + }, { "type": "image", "url": image_url, @@ -2618,11 +2675,11 @@ class ChatModelIntegrationTests(ChatModelTests): ``` This test can be skipped by setting the `supports_image_tool_message` property - to False (see Configuration below). + to False (see configuration below). ??? note "Configuration" - To disable this test, set `supports_image_tool_message` to False in your + To disable this test, set `supports_image_tool_message` to `False` in your test class: ```python @@ -2632,7 +2689,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? 
question "Troubleshooting" If this test fails, check that the model can correctly handle messages with image content blocks in `ToolMessage` objects, including base64-encoded @@ -2643,7 +2700,7 @@ class ChatModelIntegrationTests(ChatModelTests): if not self.supports_image_tool_message: pytest.skip("Model does not support image tool message.") - image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + image_url = "https://raw.githubusercontent.com/langchain-ai/docs/4d11d08b6b0e210bd456943f7a22febbd168b543/src/images/agentic-rag-output.png" image_data = base64.b64encode(httpx.get(image_url).content).decode("utf-8") # OpenAI CC format, base64 data @@ -2651,7 +2708,7 @@ class ChatModelIntegrationTests(ChatModelTests): content=[ { "type": "image_url", - "image_url": {"url": f"data:image/jpeg;base64,{image_data}"}, + "image_url": {"url": f"data:image/png;base64,{image_data}"}, }, ], tool_call_id="1", @@ -2664,7 +2721,7 @@ class ChatModelIntegrationTests(ChatModelTests): { "type": "image", "base64": image_data, - "mime_type": "image/jpeg", + "mime_type": "image/png", }, ], tool_call_id="1", @@ -2674,7 +2731,8 @@ class ChatModelIntegrationTests(ChatModelTests): for tool_message in [oai_format_message, standard_format_message]: messages = [ HumanMessage( - "get a random image using the tool and describe the weather" + "get a random diagram using the tool and give it a concise " + "description" ), AIMessage( [], @@ -2717,11 +2775,11 @@ class ChatModelIntegrationTests(ChatModelTests): ``` This test can be skipped by setting the `supports_pdf_tool_message` property - to False (see Configuration below). + to False (see configuration below). ??? note "Configuration" - To disable this test, set `supports_pdf_tool_message` to False in your + To disable this test, set `supports_pdf_tool_message` to `False` in your test class: ```python @@ -2731,7 +2789,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the model can correctly handle messages with PDF content blocks in `ToolMessage` objects, specifically @@ -2800,7 +2858,7 @@ class ChatModelIntegrationTests(ChatModelTests): ) ``` - as well as `HumanMessage` objects containing `tool_result` content blocks: + ...as well as `HumanMessage` objects containing `tool_result` content blocks: ```python HumanMessage( @@ -2827,7 +2885,7 @@ class ChatModelIntegrationTests(ChatModelTests): ??? note "Configuration" - To disable this test, set `supports_anthropic_inputs` to False in your + To disable this test, set `supports_anthropic_inputs` to `False` in your test class: ```python @@ -2837,7 +2895,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that: @@ -2849,7 +2907,7 @@ class ChatModelIntegrationTests(ChatModelTests): handled. Otherwise, if Anthropic tool call and result formats are not supported, - set the `supports_anthropic_inputs` property to False. + set the `supports_anthropic_inputs` property to `False`. 
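To make the Anthropic-style shapes concrete, the message sequence this test sends looks roughly like the following. This is a sketch assembled from the blocks shown above; the `tool_result` keys follow Anthropic's documented format and may differ slightly from the actual fixture:

```python
from langchain_core.messages import AIMessage, HumanMessage

# Assistant turn: Anthropic-style `tool_use` block, mirrored in `tool_calls`.
ai_msg = AIMessage(
    [
        {"type": "text", "text": "Hmm let me think about that"},
        {
            "type": "tool_use",
            "input": {"fav_color": "purple"},
            "id": "foo",
            "name": "color_picker",
        },
    ],
    tool_calls=[
        {
            "name": "color_picker",
            "args": {"fav_color": "purple"},
            "id": "foo",
            "type": "tool_call",
        }
    ],
)

# Follow-up human turn: Anthropic-style `tool_result` block plus plain text.
followup = HumanMessage(
    [
        {
            "type": "tool_result",
            "tool_use_id": "foo",
            "content": "purple is a great pick!",
            "is_error": False,
        },
        {"type": "text", "text": "What's a good accent color to go with it?"},
    ]
)
```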
""" if not self.supports_anthropic_inputs: @@ -2877,14 +2935,14 @@ class ChatModelIntegrationTests(ChatModelTests): }, ] if self.supports_image_inputs: - image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + image_url = "https://raw.githubusercontent.com/langchain-ai/docs/4d11d08b6b0e210bd456943f7a22febbd168b543/src/images/agentic-rag-output.png" image_data = base64.b64encode(httpx.get(image_url).content).decode("utf-8") human_content.append( { "type": "image", "source": { "type": "base64", - "media_type": "image/jpeg", + "media_type": "image/png", "data": image_data, }, } @@ -2897,7 +2955,7 @@ class ChatModelIntegrationTests(ChatModelTests): {"type": "text", "text": "Hmm let me think about that"}, { "type": "tool_use", - "input": {"fav_color": "green"}, + "input": {"fav_color": "purple"}, "id": "foo", "name": "color_picker", }, @@ -2905,7 +2963,7 @@ class ChatModelIntegrationTests(ChatModelTests): tool_calls=[ { "name": "color_picker", - "args": {"fav_color": "green"}, + "args": {"fav_color": "purple"}, "id": "foo", "type": "tool_call", } @@ -2973,7 +3031,7 @@ class ChatModelIntegrationTests(ChatModelTests): If possible, the `name` field should be parsed and passed appropriately to the model. Otherwise, it should be ignored. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the `name` field on `HumanMessage` objects is either ignored or passed to the model appropriately. @@ -2993,11 +3051,11 @@ class ChatModelIntegrationTests(ChatModelTests): set to `False`. This test is optional and should be skipped if the model does not support - tool calling (see Configuration below). + tool calling (see configuration below). ??? note "Configuration" - To disable tool calling tests, set `has_tool_calling` to False in your + To disable tool calling tests, set `has_tool_calling` to `False` in your test class: ```python @@ -3007,7 +3065,7 @@ class ChatModelIntegrationTests(ChatModelTests): return False ``` - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that `bind_tools` is implemented to correctly translate LangChain tool objects into the appropriate schema for your @@ -3032,7 +3090,7 @@ class ChatModelIntegrationTests(ChatModelTests): @tool def get_weather(location: str) -> str: # noqa: ARG001 - """Call to surf the web.""" + """Get the weather at a location.""" return "It's sunny." llm_with_tools = model.bind_tools([get_weather]) diff --git a/libs/standard-tests/langchain_tests/integration_tests/embeddings.py b/libs/standard-tests/langchain_tests/integration_tests/embeddings.py index 9d556a21581..06e5c799c6b 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/embeddings.py +++ b/libs/standard-tests/langchain_tests/integration_tests/embeddings.py @@ -65,7 +65,7 @@ class EmbeddingsIntegrationTests(EmbeddingsTests): If this test fails, check that: 1. The model will generate a list of lists of floats when calling - `.embed_documents` on a list of strings. + `embed_documents` on a list of strings. 2. The length of each list is the same. """ documents = ["foo", "bar", "baz"] @@ -84,7 +84,7 @@ class EmbeddingsIntegrationTests(EmbeddingsTests): If this test fails, check that: - 1. The model will generate a list of floats when calling `.aembed_query` + 1. The model will generate a list of floats when calling `aembed_query` on a string. 2. 
The length of the list is consistent across different inputs. """ @@ -106,7 +106,7 @@ class EmbeddingsIntegrationTests(EmbeddingsTests): If this test fails, check that: 1. The model will generate a list of lists of floats when calling - `.aembed_documents` on a list of strings. + `aembed_documents` on a list of strings. 2. The length of each list is the same. """ documents = ["foo", "bar", "baz"] diff --git a/libs/standard-tests/langchain_tests/integration_tests/indexer.py b/libs/standard-tests/langchain_tests/integration_tests/indexer.py index a4b94c1ae85..a3448fabbd0 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/indexer.py +++ b/libs/standard-tests/langchain_tests/integration_tests/indexer.py @@ -1,8 +1,8 @@ """Test suite to check index implementations. -Standard tests for the DocumentIndex abstraction +Standard tests for the `DocumentIndex` abstraction -We don't recommend implementing externally managed DocumentIndex abstractions at this +We don't recommend implementing externally managed `DocumentIndex` abstractions at this time. """ @@ -19,8 +19,8 @@ from langchain_core.indexing.base import DocumentIndex class DocumentIndexerTestSuite(ABC): """Test suite for checking the read-write of a document index. - Implementers should subclass this test suite and provide a fixture - that returns an empty index for each test. + Implementers should subclass this test suite and provide a fixture that returns an + empty index for each test. """ @abstractmethod @@ -29,7 +29,7 @@ class DocumentIndexerTestSuite(ABC): """Get the index.""" def test_upsert_documents_has_no_ids(self, index: DocumentIndex) -> None: - """Verify that there is no parameter called ids in upsert.""" + """Verify that there is no parameter called IDs in upsert.""" signature = inspect.signature(index.upsert) assert "ids" not in signature.parameters @@ -65,7 +65,7 @@ class DocumentIndexerTestSuite(ABC): ) def test_upsert_some_ids(self, index: DocumentIndex) -> None: - """Test an upsert where some docs have ids and some don't.""" + """Test an upsert where some docs have IDs and some don't.""" foo_uuid = str(uuid.UUID(int=7)) documents = [ Document(id=foo_uuid, page_content="foo", metadata={"id": 1}), @@ -172,7 +172,7 @@ class DocumentIndexerTestSuite(ABC): ] def test_delete_no_args(self, index: DocumentIndex) -> None: - """Test delete with no args raises ValueError.""" + """Test delete with no args raises `ValueError`.""" with pytest.raises(ValueError): # noqa: PT011 index.delete() @@ -219,7 +219,7 @@ class AsyncDocumentIndexTestSuite(ABC): """Get the index.""" async def test_upsert_documents_has_no_ids(self, index: DocumentIndex) -> None: - """Verify that there is not parameter called ids in upsert.""" + """Verify that there is not parameter called IDs in upsert.""" signature = inspect.signature(index.upsert) assert "ids" not in signature.parameters @@ -255,7 +255,7 @@ class AsyncDocumentIndexTestSuite(ABC): ) async def test_upsert_some_ids(self, index: DocumentIndex) -> None: - """Test an upsert where some docs have ids and some don't.""" + """Test an upsert where some docs have IDs and some don't.""" foo_uuid = str(uuid.UUID(int=7)) documents = [ Document(id=foo_uuid, page_content="foo", metadata={"id": 1}), @@ -364,7 +364,7 @@ class AsyncDocumentIndexTestSuite(ABC): ] async def test_delete_no_args(self, index: DocumentIndex) -> None: - """Test delete with no args raises ValueError.""" + """Test delete with no args raises `ValueError`.""" with pytest.raises(ValueError): # noqa: PT011 await index.adelete() diff 
--git a/libs/standard-tests/langchain_tests/integration_tests/retrievers.py b/libs/standard-tests/langchain_tests/integration_tests/retrievers.py index 1736045cca0..00ed2b04146 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/retrievers.py +++ b/libs/standard-tests/langchain_tests/integration_tests/retrievers.py @@ -72,13 +72,13 @@ class RetrieversIntegrationTests(BaseStandardTests): of documents ( of the one set in `num_results_arg_name`) when it is set. - For example, a retriever like + For example, a retriever like... ```python MyRetriever(k=3).invoke("query") ``` - should return 3 documents when invoked with a query. + ...should return 3 documents when invoked with a query. """ params = { @@ -128,13 +128,13 @@ class RetrieversIntegrationTests(BaseStandardTests): of documents (`k` of the one set in `num_results_arg_name`) when it is set. - For example, a retriever like + For example, a retriever like... ```python MyRetriever().invoke("query", k=3) ``` - should return 3 documents when invoked with a query. + ...should return 3 documents when invoked with a query. """ result_1 = retriever.invoke( @@ -169,8 +169,8 @@ class RetrieversIntegrationTests(BaseStandardTests): async def test_ainvoke_returns_documents(self, retriever: BaseRetriever) -> None: """Test ainvoke returns documents. - If ainvoked with the example params, the retriever should return a list of - Documents. + If `ainvoke`'d with the example params, the retriever should return a list of + `Document` objects. See `test_invoke_returns_documents` for more information on troubleshooting. diff --git a/libs/standard-tests/langchain_tests/integration_tests/tools.py b/libs/standard-tests/langchain_tests/integration_tests/tools.py index b485cbe7023..6e45a9b055d 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/tools.py +++ b/libs/standard-tests/langchain_tests/integration_tests/tools.py @@ -12,15 +12,16 @@ class ToolsIntegrationTests(ToolsTests): def test_invoke_matches_output_schema(self, tool: BaseTool) -> None: """Test invoke matches output schema. - If invoked with a ToolCall, the tool should return a valid ToolMessage content. + If invoked with a `ToolCall`, the tool should return a valid `ToolMessage` + content. If you have followed the [custom tool guide](https://python.langchain.com/docs/how_to/custom_tools/), - this test should always pass because ToolCall inputs are handled by the + this test should always pass because `ToolCall` inputs are handled by the `langchain_core.tools.BaseTool` class. If you have not followed this guide, you should ensure that your tool's `invoke` method returns a valid ToolMessage content when it receives - a dict representing a ToolCall as input (as opposed to distinct args). + a `dict` representing a `ToolCall` as input (as opposed to distinct args). """ tool_call = ToolCall( name=tool.name, @@ -44,7 +45,8 @@ class ToolsIntegrationTests(ToolsTests): async def test_async_invoke_matches_output_schema(self, tool: BaseTool) -> None: """Test async invoke matches output schema. - If ainvoked with a ToolCall, the tool should return a valid ToolMessage content. + If ainvoked with a `ToolCall`, the tool should return a valid `ToolMessage` + content. For debugging tips, see `test_invoke_matches_output_schema`. """ @@ -68,23 +70,23 @@ class ToolsIntegrationTests(ToolsTests): assert all(isinstance(c, str | dict) for c in tool_message.content) def test_invoke_no_tool_call(self, tool: BaseTool) -> None: - """Test invoke without ToolCall. + """Test invoke without `ToolCall`. 
- If invoked without a ToolCall, the tool can return anything + If invoked without a `ToolCall`, the tool can return anything but it shouldn't throw an error. If this test fails, your tool may not be handling the input you defined in `tool_invoke_params_example` correctly, and it's throwing an error. This test doesn't have any checks. It's just to ensure that the tool - doesn't throw an error when invoked with a dictionary of kwargs. + doesn't throw an error when invoked with a `dict` of `**kwargs`. """ tool.invoke(self.tool_invoke_params_example) async def test_async_invoke_no_tool_call(self, tool: BaseTool) -> None: - """Test async invoke without ToolCall. + """Test async invoke without `ToolCall`. - If ainvoked without a ToolCall, the tool can return anything + If ainvoked without a `ToolCall`, the tool can return anything but it shouldn't throw an error. For debugging tips, see `test_invoke_no_tool_call`. diff --git a/libs/standard-tests/langchain_tests/integration_tests/vectorstores.py b/libs/standard-tests/langchain_tests/integration_tests/vectorstores.py index 066d1d30a5b..1ab02b61318 100644 --- a/libs/standard-tests/langchain_tests/integration_tests/vectorstores.py +++ b/libs/standard-tests/langchain_tests/integration_tests/vectorstores.py @@ -1,4 +1,4 @@ -"""Test suite to test vectostores.""" +"""Test suite to test `VectorStore` integrations.""" from abc import abstractmethod @@ -69,7 +69,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): class TestChromaStandard(VectorStoreIntegrationTests): @pytest.fixture() def vectorstore(self) -> Generator[VectorStore, None, None]: # type: ignore - \"\"\"Get an empty vectorstore for unit tests.\"\"\" + \"\"\"Get an empty VectorStore for unit tests.\"\"\" store = Chroma(embedding_function=self.get_embeddings()) try: yield store @@ -95,15 +95,14 @@ class VectorStoreIntegrationTests(BaseStandardTests): !!! note API references for individual test methods include troubleshooting tips. - """ # noqa: E501 @abstractmethod @pytest.fixture def vectorstore(self) -> VectorStore: - """Get the vectorstore class to test. + """Get the `VectorStore` class to test. - The returned vectorstore should be EMPTY. + The returned `VectorStore` should be empty. """ @property @@ -118,7 +117,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): @property def has_get_by_ids(self) -> bool: - """Whether the vector store supports get_by_ids.""" + """Whether the `VectorStore` supports `get_by_ids`.""" return True @staticmethod @@ -137,7 +136,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): ) def test_vectorstore_is_empty(self, vectorstore: VectorStore) -> None: - """Test that the vectorstore is empty. + """Test that the `VectorStore` is empty. ??? note "Troubleshooting" @@ -151,7 +150,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): assert vectorstore.similarity_search("foo", k=1) == [] def test_add_documents(self, vectorstore: VectorStore) -> None: - """Test adding documents into the vectorstore. + """Test adding documents into the `VectorStore`. ??? note "Troubleshooting" @@ -159,7 +158,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): 1. We correctly initialize an empty vector store in the `vectorestore` fixture. - 2. Calling `.similarity_search` for the top `k` similar documents does + 2. Calling `similarity_search` for the top `k` similar documents does not threshold by score. 3. We do not mutate the original document object when adding it to the vector store (e.g., by adding an ID). 
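A condensed sketch of what those `add_documents` checks assert, assuming `vectorstore` is the empty store yielded by the fixture (names and document contents here are illustrative):

```python
from langchain_core.documents import Document
from langchain_core.vectorstores import VectorStore


def add_documents_smoke_test(vectorstore: VectorStore) -> None:
    # The fixture must hand the suite an EMPTY store.
    assert vectorstore.similarity_search("foo", k=1) == []

    original = [
        Document(page_content="foo", metadata={"id": 1}),
        Document(page_content="bar", metadata={"id": 2}),
    ]
    ids = vectorstore.add_documents(original)

    # IDs are assigned/returned without mutating the input documents.
    assert len(ids) == 2
    assert all(doc.id is None for doc in original)

    # Top-k search returns k results; it is not thresholded by score.
    assert len(vectorstore.similarity_search("foo", k=2)) == 2
```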
@@ -185,7 +184,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): ] def test_vectorstore_still_empty(self, vectorstore: VectorStore) -> None: - """Test that the vectorstore is still empty. + """Test that the `VectorStore` is still empty. This test should follow a test that adds documents. @@ -204,7 +203,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): assert vectorstore.similarity_search("foo", k=1) == [] def test_deleting_documents(self, vectorstore: VectorStore) -> None: - """Test deleting documents from the vectorstore. + """Test deleting documents from the `VectorStore`. ??? note "Troubleshooting" @@ -288,7 +287,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): ] def test_add_documents_by_id_with_mutation(self, vectorstore: VectorStore) -> None: - """Test that we can overwrite by ID using add_documents. + """Test that we can overwrite by ID using `add_documents`. ??? note "Troubleshooting" @@ -398,7 +397,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): assert documents == [] def test_add_documents_documents(self, vectorstore: VectorStore) -> None: - """Run add_documents tests. + """Run `add_documents` tests. ??? note "Troubleshooting" @@ -439,7 +438,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): ) def test_add_documents_with_existing_ids(self, vectorstore: VectorStore) -> None: - """Test that add_documents with existing IDs is idempotent. + """Test that `add_documents` with existing IDs is idempotent. ??? note "Troubleshooting" @@ -485,7 +484,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): ) async def test_vectorstore_is_empty_async(self, vectorstore: VectorStore) -> None: - """Test that the vectorstore is empty. + """Test that the `VectorStore` is empty. ??? note "Troubleshooting" @@ -499,7 +498,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): assert await vectorstore.asimilarity_search("foo", k=1) == [] async def test_add_documents_async(self, vectorstore: VectorStore) -> None: - """Test adding documents into the vectorstore. + """Test adding documents into the `VectorStore`. ??? note "Troubleshooting" @@ -536,7 +535,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): async def test_vectorstore_still_empty_async( self, vectorstore: VectorStore ) -> None: - """Test that the vectorstore is still empty. + """Test that the `VectorStore` is still empty. This test should follow a test that adds documents. @@ -555,7 +554,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): assert await vectorstore.asimilarity_search("foo", k=1) == [] async def test_deleting_documents_async(self, vectorstore: VectorStore) -> None: - """Test deleting documents from the vectorstore. + """Test deleting documents from the `VectorStore`. ??? note "Troubleshooting" @@ -643,7 +642,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): async def test_add_documents_by_id_with_mutation_async( self, vectorstore: VectorStore ) -> None: - """Test that we can overwrite by ID using add_documents. + """Test that we can overwrite by ID using `add_documents`. ??? note "Troubleshooting" @@ -754,7 +753,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): async def test_add_documents_documents_async( self, vectorstore: VectorStore ) -> None: - """Run add_documents tests. + """Run `add_documents` tests. ??? 
note "Troubleshooting" @@ -797,7 +796,7 @@ class VectorStoreIntegrationTests(BaseStandardTests): async def test_add_documents_with_existing_ids_async( self, vectorstore: VectorStore ) -> None: - """Test that add_documents with existing IDs is idempotent. + """Test that `add_documents` with existing IDs is idempotent. ??? note "Troubleshooting" diff --git a/libs/standard-tests/langchain_tests/unit_tests/chat_models.py b/libs/standard-tests/langchain_tests/unit_tests/chat_models.py index 1b99be089b6..895608b3f12 100644 --- a/libs/standard-tests/langchain_tests/unit_tests/chat_models.py +++ b/libs/standard-tests/langchain_tests/unit_tests/chat_models.py @@ -110,7 +110,7 @@ class ChatModelTests(BaseStandardTests): @property def has_tool_calling(self) -> bool: - """(bool) whether the model supports tool calling.""" + """Whether the model supports tool calling.""" return self.chat_model_class.bind_tools is not BaseChatModel.bind_tools @property @@ -120,7 +120,7 @@ class ChatModelTests(BaseStandardTests): @property def has_tool_choice(self) -> bool: - """(bool) whether the model supports tool calling.""" + """Whether the model supports tool calling.""" bind_tools_params = inspect.signature( self.chat_model_class.bind_tools ).parameters @@ -128,7 +128,7 @@ class ChatModelTests(BaseStandardTests): @property def has_structured_output(self) -> bool: - """(bool) whether the chat model supports structured output.""" + """Whether the chat model supports structured output.""" return ( self.chat_model_class.with_structured_output is not BaseChatModel.with_structured_output @@ -136,19 +136,19 @@ class ChatModelTests(BaseStandardTests): @property def structured_output_kwargs(self) -> dict: - """If specified, additional kwargs for with_structured_output.""" + """If specified, additional kwargs for `with_structured_output`.""" return {} @property def supports_json_mode(self) -> bool: - """(bool) whether the chat model supports JSON mode.""" + """Whether the chat model supports JSON mode.""" return False @property def supports_image_inputs(self) -> bool: """Supports image inputs. - (bool) whether the chat model supports image inputs, defaults to + Whether the chat model supports image inputs, defaults to `False`. """ @@ -158,7 +158,7 @@ class ChatModelTests(BaseStandardTests): def supports_image_urls(self) -> bool: """Supports image inputs from URLs. - (bool) whether the chat model supports image inputs from URLs, defaults to + Whether the chat model supports image inputs from URLs, defaults to `False`. """ @@ -166,14 +166,14 @@ class ChatModelTests(BaseStandardTests): @property def supports_pdf_inputs(self) -> bool: - """(bool) whether the chat model supports PDF inputs, defaults to `False`.""" + """Whether the chat model supports PDF inputs, defaults to `False`.""" return False @property def supports_audio_inputs(self) -> bool: """Supports audio inputs. - (bool) whether the chat model supports audio inputs, defaults to `False`. + Whether the chat model supports audio inputs, defaults to `False`. """ return False @@ -182,10 +182,9 @@ class ChatModelTests(BaseStandardTests): def supports_video_inputs(self) -> bool: """Supports video inputs. - (bool) whether the chat model supports video inputs, defaults to `False`. + Whether the chat model supports video inputs, defaults to `False`. No current tests are written for this feature. - """ return False @@ -193,7 +192,7 @@ class ChatModelTests(BaseStandardTests): def returns_usage_metadata(self) -> bool: """Returns usage metadata. 
- (bool) whether the chat model returns usage metadata on invoke and streaming + Whether the chat model returns usage metadata on invoke and streaming responses. """ @@ -201,16 +200,15 @@ class ChatModelTests(BaseStandardTests): @property def supports_anthropic_inputs(self) -> bool: - """(bool) whether the chat model supports Anthropic-style inputs.""" + """Whether the chat model supports Anthropic-style inputs.""" return False @property def supports_image_tool_message(self) -> bool: """Supports image `ToolMessage` objects. - (bool) whether the chat model supports `ToolMessage` objects that include image + Whether the chat model supports `ToolMessage` objects that include image content. - """ return False @@ -218,20 +216,18 @@ class ChatModelTests(BaseStandardTests): def supports_pdf_tool_message(self) -> bool: """Supports PDF `ToolMessage` objects. - (bool) whether the chat model supports `ToolMessage` objects that include PDF + Whether the chat model supports `ToolMessage` objects that include PDF content. - """ return False @property def enable_vcr_tests(self) -> bool: - """(bool) whether to enable VCR tests for the chat model. + """Whether to enable VCR tests for the chat model. !!! warning See `enable_vcr_tests` dropdown `above ` for more information. - """ return False @@ -252,8 +248,8 @@ class ChatModelTests(BaseStandardTests): ]: """Supported usage metadata details. - (dict) what usage metadata details are emitted in invoke and stream. Only - needs to be overridden if these details are returned by the model. + What usage metadata details are emitted in invoke and stream. Only needs to be + overridden if these details are returned by the model. """ return {"invoke": [], "stream": []} @@ -290,43 +286,41 @@ class ChatModelUnitTests(ChatModelTests): Test subclasses **must** implement the following two properties: - chat_model_class - The chat model class to test, e.g., `ChatParrotLink`. + `chat_model_class`: The chat model class to test, e.g., `ChatParrotLink`. - ```python - @property - def chat_model_class(self) -> Type[ChatParrotLink]: - return ChatParrotLink - ``` - chat_model_params - Initialization parameters for the chat model. + ```python + @property + def chat_model_class(self) -> Type[ChatParrotLink]: + return ChatParrotLink + ``` - ```python - @property - def chat_model_params(self) -> dict: - return {"model": "bird-brain-001", "temperature": 0} - ``` + `chat_model_params`: Initialization parameters for the chat model. + + ```python + @property + def chat_model_params(self) -> dict: + return {"model": "bird-brain-001", "temperature": 0} + ``` In addition, test subclasses can control what features are tested (such as tool calling or multi-modality) by selectively overriding the following properties. + Expand to see details: - ??? note "`has_tool_calling`" + ??? info "`has_tool_calling`" Boolean property indicating whether the chat model supports tool calling. By default, this is determined by whether the chat model's `bind_tools` method is overridden. It typically does not need to be overridden on the test class. - Example override: - - ```python + ```python "Example override" @property def has_tool_calling(self) -> bool: return True ``` - ??? note "`tool_choice_value`" + ??? info "`tool_choice_value`" Value to use for tool choice when used in tests. @@ -343,7 +337,7 @@ class ChatModelUnitTests(ChatModelTests): return "any" ``` - ??? note "`has_tool_choice`" + ??? 
info "`has_tool_choice`" Boolean property indicating whether the chat model supports forcing tool calling via a `tool_choice` parameter. @@ -355,15 +349,13 @@ class ChatModelUnitTests(ChatModelTests): `tool_choice="any"` will force a tool call, and `tool_choice=` will force a call to a specific tool. - Example override: - - ```python + ```python "Example override" @property def has_tool_choice(self) -> bool: return False ``` - ??? note "`has_structured_output`" + ??? info "`has_structured_output`" Boolean property indicating whether the chat model supports structured output. @@ -372,7 +364,7 @@ class ChatModelUnitTests(ChatModelTests): `with_structured_output` or `bind_tools` methods. If the base implementations are intended to be used, this method should be overridden. - See: https://python.langchain.com/docs/concepts/structured_outputs/ + See: https://docs.langchain.com/oss/python/langchain/structured-output ```python @property @@ -380,10 +372,12 @@ class ChatModelUnitTests(ChatModelTests): return True ``` - ??? note "`structured_output_kwargs`" + ??? info "`structured_output_kwargs`" Dict property that can be used to specify additional kwargs for - `with_structured_output`. Useful for testing different models. + `with_structured_output`. + + Useful for testing different models. ```python @property @@ -391,12 +385,12 @@ class ChatModelUnitTests(ChatModelTests): return {"method": "function_calling"} ``` - ??? note "`supports_json_mode`" + ??? info "`supports_json_mode`" Boolean property indicating whether the chat model supports JSON mode in `with_structured_output`. - See: https://python.langchain.com/docs/concepts/structured_outputs/#json-mode + See: https://docs.langchain.com/oss/python/langchain/structured-output ```python @property @@ -404,9 +398,10 @@ class ChatModelUnitTests(ChatModelTests): return True ``` - ??? note "`supports_image_inputs`" + ??? info "`supports_image_inputs`" Boolean property indicating whether the chat model supports image inputs. + Defaults to `False`. If set to `True`, the chat model will be tested using the LangChain @@ -416,7 +411,7 @@ class ChatModelUnitTests(ChatModelTests): { "type": "image", "base64": "", - "mime_type": "image/jpeg", # or appropriate mime-type + "mime_type": "image/jpeg", # or appropriate MIME type } ``` @@ -429,8 +424,7 @@ class ChatModelUnitTests(ChatModelTests): } ``` - See https://python.langchain.com/docs/concepts/multimodality/ - + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -438,10 +432,12 @@ class ChatModelUnitTests(ChatModelTests): return True ``` - ??? note "`supports_image_urls`" + ??? info "`supports_image_urls`" Boolean property indicating whether the chat model supports image inputs from - URLs. Defaults to `False`. + URLs. + + Defaults to `False`. If set to `True`, the chat model will be tested using content blocks of the form. @@ -453,8 +449,7 @@ class ChatModelUnitTests(ChatModelTests): } ``` - See https://python.langchain.com/docs/concepts/multimodality/ - + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -462,9 +457,10 @@ class ChatModelUnitTests(ChatModelTests): return True ``` - ??? note "`supports_pdf_inputs`" + ??? info "`supports_pdf_inputs`" Boolean property indicating whether the chat model supports PDF inputs. + Defaults to `False`. 
If set to `True`, the chat model will be tested using the LangChain @@ -478,8 +474,7 @@ class ChatModelUnitTests(ChatModelTests): } ``` - See https://python.langchain.com/docs/concepts/multimodality/ - + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -487,9 +482,10 @@ class ChatModelUnitTests(ChatModelTests): return True ``` - ??? note "`supports_audio_inputs`" + ??? info "`supports_audio_inputs`" Boolean property indicating whether the chat model supports audio inputs. + Defaults to `False`. If set to `True`, the chat model will be tested using the LangChain @@ -499,11 +495,11 @@ class ChatModelUnitTests(ChatModelTests): { "type": "audio", "base64": "", - "mime_type": "audio/wav", # or appropriate mime-type + "mime_type": "audio/wav", # or appropriate MIME type } ``` - See https://python.langchain.com/docs/concepts/multimodality/ + See https://docs.langchain.com/oss/python/langchain/models#multimodal ```python @property @@ -511,19 +507,36 @@ class ChatModelUnitTests(ChatModelTests): return True ``` - ??? note "`supports_video_inputs`" + !!! warning + This test downloads audio data from wikimedia.org. You may need to set the + `LANGCHAIN_TESTS_USER_AGENT` environment variable to identify these tests, + e.g., + + ```bash + export LANGCHAIN_TESTS_USER_AGENT="CoolBot/0.0 (https://example.org/coolbot/; coolbot@example.org) generic-library/0.0" + ``` + + Refer to the [Wikimedia Foundation User-Agent Policy](https://foundation.wikimedia.org/wiki/Policy:Wikimedia_Foundation_User-Agent_Policy). + + ??? info "`supports_video_inputs`" Boolean property indicating whether the chat model supports image inputs. - Defaults to `False`. No current tests are written for this feature. - ??? note "`returns_usage_metadata`" + Defaults to `False`. + + No current tests are written for this feature. + + ??? info "`returns_usage_metadata`" Boolean property indicating whether the chat model returns usage metadata - on invoke and streaming responses. Defaults to `True`. + on invoke and streaming responses. - `usage_metadata` is an optional dict attribute on `AIMessage`s that track + Defaults to `True`. + + `usage_metadata` is an optional dict attribute on `AIMessage` objects that track input and output tokens. - [See more](https://python.langchain.com/api_reference/core/messages/langchain_core.messages.ai.UsageMetadata.html). + + [See more](https://reference.langchain.com/python/langchain_core/language_models/#langchain_core.messages.ai.UsageMetadata). ```python @property @@ -534,7 +547,7 @@ class ChatModelUnitTests(ChatModelTests): Models supporting `usage_metadata` should also return the name of the underlying model in the `response_metadata` of the `AIMessage`. - ??? note "`supports_anthropic_inputs`" + ??? info "`supports_anthropic_inputs`" Boolean property indicating whether the chat model supports Anthropic-style inputs. @@ -562,7 +575,7 @@ class ChatModelUnitTests(ChatModelTests): return False ``` - ??? note "`supports_image_tool_message`" + ??? info "`supports_image_tool_message`" Boolean property indicating whether the chat model supports `ToolMessage` objects that include image content, e.g., @@ -608,7 +621,7 @@ class ChatModelUnitTests(ChatModelTests): return False ``` - ??? note "`supports_pdf_tool_message`" + ??? info "`supports_pdf_tool_message`" Boolean property indicating whether the chat model supports `ToolMessage` objects that include PDF content, i.e., @@ -638,14 +651,14 @@ class ChatModelUnitTests(ChatModelTests): return False ``` - ??? 
note "`supported_usage_metadata_details`" + ??? info "`supported_usage_metadata_details`" Property controlling what usage metadata details are emitted in both `invoke` and `stream`. - `usage_metadata` is an optional dict attribute on `AIMessage`s that track + `usage_metadata` is an optional dict attribute on `AIMessage` objects that track input and output tokens. - [See more](https://python.langchain.com/api_reference/core/messages/langchain_core.messages.ai.UsageMetadata.html). + [See more](https://reference.langchain.com/python/langchain_core/language_models/#langchain_core.messages.ai.UsageMetadata). It includes optional keys `input_token_details` and `output_token_details` that can track usage details associated with special types of tokens, such as @@ -653,7 +666,7 @@ class ChatModelUnitTests(ChatModelTests): Only needs to be overridden if these details are supplied. - ??? note "`enable_vcr_tests`" + ??? info "`enable_vcr_tests`" Property controlling whether to enable select tests that rely on [VCR](https://vcrpy.readthedocs.io/en/latest/) caching of HTTP calls, such @@ -686,9 +699,7 @@ class ChatModelUnitTests(ChatModelTests): also exclude additional headers, override the default exclusions, or apply other customizations to the VCR configuration. See example below: - ```python - :caption: tests/conftest.py - + ```python title="tests/conftest.py" import pytest from langchain_tests.conftest import ( _base_vcr_config as _base_vcr_config, @@ -723,9 +734,7 @@ class ChatModelUnitTests(ChatModelTests): to your VCR fixture and enable this serializer in the config. See example below: - ```python - :caption: tests/conftest.py - + ```python title="tests/conftest.py" import pytest from langchain_tests.conftest import ( CustomPersister, @@ -773,7 +782,7 @@ class ChatModelUnitTests(ChatModelTests): gunzip -k /path/to/tests/cassettes/TestClass_test.yaml.gz ``` - or by using the serializer: + ...or by using the serializer: ```python from langchain_tests.conftest import ( @@ -806,37 +815,37 @@ class ChatModelUnitTests(ChatModelTests): You can then commit the cassette to your repository. Subsequent test runs will use the cassette instead of making HTTP calls. - Testing initialization from environment variables - Some unit tests may require testing initialization from environment variables. - These tests can be enabled by overriding the `init_from_env_params` - property (see below): + **Testing initialization from environment variables** - ??? note "`init_from_env_params`" + Some unit tests may require testing initialization from environment variables. + These tests can be enabled by overriding the `init_from_env_params` + property (see below). - This property is used in unit tests to test initialization from - environment variables. It should return a tuple of three dictionaries - that specify the environment variables, additional initialization args, - and expected instance attributes to check. + ??? info "`init_from_env_params`" - Defaults to empty dicts. If not overridden, the test is skipped. + This property is used in unit tests to test initialization from + environment variables. It should return a tuple of three dictionaries + that specify the environment variables, additional initialization args, + and expected instance attributes to check. - Example: - ```python - @property - def init_from_env_params(self) -> Tuple[dict, dict, dict]: - return ( - { - "MY_API_KEY": "api_key", - }, - { - "model": "bird-brain-001", - }, - { - "my_api_key": "api_key", - }, - ) - ``` + Defaults to empty dicts. 
If not overridden, the test is skipped. + Example: + ```python + @property + def init_from_env_params(self) -> Tuple[dict, dict, dict]: + return ( + { + "MY_API_KEY": "api_key", + }, + { + "model": "bird-brain-001", + }, + { + "my_api_key": "api_key", + }, + ) + ``` ''' # noqa: E501,D214 @property @@ -850,16 +859,15 @@ class ChatModelUnitTests(ChatModelTests): def init_from_env_params(self) -> tuple[dict, dict, dict]: """Init from env params. - (tuple) environment variables, additional initialization args, and expected - instance attributes for testing initialization from environment variables. - + Environment variables, additional initialization args, and expected instance + attributes for testing initialization from environment variables. """ return {}, {}, {} def test_init(self) -> None: """Test model initialization. This should pass for all integrations. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that: @@ -883,7 +891,7 @@ class ChatModelUnitTests(ChatModelTests): Relies on the `init_from_env_params` property. Test is skipped if that property is not set. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that `init_from_env_params` is specified correctly and that model parameters are properly set from environment @@ -910,7 +918,7 @@ class ChatModelUnitTests(ChatModelTests): This is for backward-compatibility purposes. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that the model can be initialized with a boolean `streaming` parameter. @@ -936,7 +944,7 @@ class ChatModelUnitTests(ChatModelTests): into `bind_tools`. Test is skipped if the `has_tool_calling` property on the test class is False. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that the model's `bind_tools` method properly handles Pydantic V2 models. `langchain_core` implements @@ -978,7 +986,7 @@ class ChatModelUnitTests(ChatModelTests): Test is skipped if the `has_structured_output` property on the test class is False. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, ensure that the model's `bind_tools` method properly handles Pydantic V2 models. `langchain_core` implements @@ -1003,7 +1011,7 @@ class ChatModelUnitTests(ChatModelTests): These are used for tracing purposes. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the model accommodates [standard parameters](https://python.langchain.com/docs/concepts/chat_models/#standard-parameters). @@ -1043,7 +1051,7 @@ class ChatModelUnitTests(ChatModelTests): Test is skipped if the `is_lc_serializable` property on the chat model class is not overwritten to return `True`. - ??? note "Troubleshooting" + ??? question "Troubleshooting" If this test fails, check that the `init_from_env_params` property is correctly set on the test class. diff --git a/libs/standard-tests/langchain_tests/unit_tests/tools.py b/libs/standard-tests/langchain_tests/unit_tests/tools.py index da8e9047c1e..aeeaf1ef768 100644 --- a/libs/standard-tests/langchain_tests/unit_tests/tools.py +++ b/libs/standard-tests/langchain_tests/unit_tests/tools.py @@ -35,8 +35,8 @@ class ToolsTests(BaseStandardTests): def tool_invoke_params_example(self) -> dict: """Returns a dictionary representing the "args" of an example tool call. - This should NOT be a ToolCall dict - it should not - have {"name", "id", "args"} keys. 
+ This should NOT be a `ToolCall` dict - it should not have + `{"name", "id", "args"}` keys. """ return {} @@ -69,9 +69,9 @@ class ToolsUnitTests(ToolsTests): def test_init(self) -> None: """Test init. - Test that the tool can be initialized with :attr:`tool_constructor` and - :attr:`tool_constructor_params`. If this fails, check that the - keyword args defined in :attr:`tool_constructor_params` are valid. + Test that the tool can be initialized with `tool_constructor` and + `tool_constructor_params`. If this fails, check that the + keyword args defined in `tool_constructor_params` are valid. """ if isinstance(self.tool_constructor, BaseTool): tool = self.tool_constructor @@ -117,7 +117,7 @@ class ToolsUnitTests(ToolsTests): If this fails, update the `tool_invoke_params_example` attribute to match the input schema (`args_schema`) of the tool. """ - # this will be a pydantic object + # This will be a Pydantic object input_schema = tool.get_input_schema() assert input_schema(**self.tool_invoke_params_example) diff --git a/libs/standard-tests/pyproject.toml b/libs/standard-tests/pyproject.toml index 68b1d7f3b62..86beacc93cb 100644 --- a/libs/standard-tests/pyproject.toml +++ b/libs/standard-tests/pyproject.toml @@ -3,11 +3,16 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -authors = [{ name = "Erick Friis", email = "erick@langchain.dev" }] +name = "langchain-tests" +description = "Standard tests for LangChain implementations" license = { text = "MIT" } +readme = "README.md" +authors = [] + +version = "1.0.1" requires-python = ">=3.10.0,<4.0.0" dependencies = [ - "langchain-core>=1.0.0a7,<2.0.0", + "langchain-core>=1.0.3,<2.0.0", "pytest>=7.0.0,<9.0.0", "pytest-asyncio>=0.20.0,<2.0.0", "httpx>=0.28.1,<1.0.0", @@ -20,18 +25,15 @@ dependencies = [ "numpy>=1.26.2; python_version<'3.13'", "numpy>=2.1.0; python_version>='3.13'", ] -name = "langchain-tests" -version = "1.0.0a2" -description = "Standard tests for LangChain implementations" -readme = "README.md" [project.urls] -homepage = "https://docs.langchain.com/" -repository = "https://github.com/langchain-ai/langchain/tree/master/libs/standard-tests" -changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-tests%3D%3D1%22" -twitter = "https://x.com/LangChainAI" -slack = "https://www.langchain.com/join-community" -reddit = "https://www.reddit.com/r/LangChain/" +Homepage = "https://docs.langchain.com/" +Documentation = "https://docs.langchain.com/" +Source = "https://github.com/langchain-ai/langchain/tree/master/libs/standard-tests" +Changelog = "https://github.com/langchain-ai/langchain/releases?q=%22langchain-tests%3D%3D1%22" +Twitter = "https://x.com/LangChainAI" +Slack = "https://www.langchain.com/join-community" +Reddit = "https://www.reddit.com/r/LangChain/" [dependency-groups] test = ["langchain-core"] diff --git a/libs/standard-tests/uv.lock b/libs/standard-tests/uv.lock index b8a3d71d5a6..29846a61ab8 100644 --- a/libs/standard-tests/uv.lock +++ b/libs/standard-tests/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10.0, <4.0.0" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -297,7 +297,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.0.0a8" +version = "1.0.3" source = { editable = "../core" } dependencies = [ { name = "jsonpatch" }, @@ -331,6 +331,7 @@ test = [ { name = "blockbuster", specifier = ">=1.5.18,<1.6.0" }, { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, { name = 
"grandalf", specifier = ">=0.8.0,<1.0.0" }, + { name = "langchain-model-profiles", directory = "../model-profiles" }, { name = "langchain-tests", directory = "." }, { name = "numpy", marker = "python_full_version < '3.13'", specifier = ">=1.26.4" }, { name = "numpy", marker = "python_full_version >= '3.13'", specifier = ">=2.1.0" }, @@ -347,6 +348,7 @@ test = [ ] test-integration = [] typing = [ + { name = "langchain-model-profiles", directory = "../model-profiles" }, { name = "langchain-text-splitters", directory = "../text-splitters" }, { name = "mypy", specifier = ">=1.18.1,<1.19.0" }, { name = "types-pyyaml", specifier = ">=6.0.12.2,<7.0.0.0" }, @@ -355,7 +357,7 @@ typing = [ [[package]] name = "langchain-tests" -version = "1.0.0a2" +version = "1.0.1" source = { editable = "." } dependencies = [ { name = "httpx" }, @@ -972,7 +974,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.9" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -980,96 +982,123 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/a7/d0d7b3c128948ece6676a6a21b9036e3ca53765d35052dbcc8c303886a44/pydantic-2.12.1.tar.gz", hash = "sha256:0af849d00e1879199babd468ec9db13b956f6608e9250500c1a9d69b6a62824e", size = 815997, upload-time = "2025-10-13T21:00:41.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/f5/69/ce4e60e5e67aa0c339a5dc3391a02b4036545efb6308c54dc4aa9425386f/pydantic-2.12.1-py3-none-any.whl", hash = "sha256:665931f5b4ab40c411439e66f99060d631d1acc58c3d481957b9123343d674d1", size = 460511, upload-time = "2025-10-13T21:00:38.935Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/e9/3916abb671bffb00845408c604ff03480dc8dc273310d8268547a37be0fb/pydantic_core-2.41.3.tar.gz", hash = "sha256:cdebb34b36ad05e8d77b4e797ad38a2a775c2a07a8fa386d4f6943b7778dcd39", size = 457489, upload-time = "2025-10-13T19:34:51.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = 
"sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = 
"2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = 
"2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", 
size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = 
"https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/8346969d4eef68f385a7cf6d9d18a6a82129177f2ac9ea36cc2cec4a7b3a/pydantic_core-2.41.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1a572d7d06b9fa6efeec32fbcd18c73081af66942b345664669867cf8e69c7b0", size = 2110164, upload-time = "2025-10-13T19:30:43.025Z" }, + { url = "https://files.pythonhosted.org/packages/60/7d/7ac0e48368c67c1ce3b34ceae1949c780381ad45ae3662f4e63a3d9a1a51/pydantic_core-2.41.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63d787ea760052585c6bfc34310aa379346f2cec363fe178659664f80421804b", size = 1919153, upload-time = "2025-10-13T19:30:44.783Z" }, + { url = "https://files.pythonhosted.org/packages/62/cb/592daea1d54b935f1f6c335d3c1db3c73207b834ce493fc82042fdb827e8/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa5a2327538f6b3c040604618cd36a960224ad7c22be96717b444c269f1a8b2", size = 1970141, upload-time = "2025-10-13T19:30:46.569Z" }, + { url = "https://files.pythonhosted.org/packages/90/5c/59a2a215ef344e08d3366a05171e0acdc33edc8584e5c22cb968f26598bf/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:947e1c5e79c54e313742c9dc25a439d38c5dcfde14f6a9a9069b3295f190c444", size = 2051479, upload-time = "2025-10-13T19:30:47.966Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/6877045de472cc3333c02f5a782fca6440ca0e012bea9a76b06093733979/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0a1e90642dd6040cfcf509230fb1c3df257f7420d52b5401b3ce164acb0a342", size = 2245684, upload-time = "2025-10-13T19:30:49.68Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/8e65785a723594d4661d559c2d1fca52827f31f32b35b8944794d80da8f0/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f7d4504d7bdce582a2700615d52dbe5f9de4ffab4815431f6da7edf5acc1329", size = 2364241, upload-time = "2025-10-13T19:30:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b4/5949e8df13a19ecc954a92207204d87fe0af5ccb6a31f7c6308d0c810221/pydantic_core-2.41.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7528ff51a26985072291c4170bd1f16f396a46ef845a428ae97bdb01ebaee7f4", size = 2072847, upload-time = "2025-10-13T19:30:52.778Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8c/ba844701bf42418dcc9acd0f3e2d239f6f13fa2aba23c5fd3afdbb955a84/pydantic_core-2.41.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:21b3a07248e481c06c4f208c53402fc143e817ce652a114f0c5d2acfd97b8b91", size = 2185990, upload-time = "2025-10-13T19:30:54.35Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/beb0030df8526d90667a94bdee5323b9a0063fbf3c5099693fddf478b434/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:45b445c09095df0d422e8ef01065f1c0a7424a17b37646b71d857ead6428b084", size = 2150559, upload-time = "2025-10-13T19:30:55.727Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dd/da4bc82999b9e1c8f650c8b2d223ff343a369fbe3a1bcb574b48093f4e07/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:c32474bb2324b574dc57aea40cb415c8ca81b73bc103f5644a15095d5552df8f", size = 2316646, upload-time = "2025-10-13T19:30:57.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/78/714aef0f059922ed3bfedb34befad5049ac78899a7a3bad941b19a28eadf/pydantic_core-2.41.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:91a38e48cdcc17763ac0abcb27c2b5fca47c2bc79ca0821b5211b2adeb06c4d0", size = 2325563, upload-time = "2025-10-13T19:30:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/36/08/78ad17af3d19fc25e4f0e2fc74ddb858b5c7da3ece394527d857b475791d/pydantic_core-2.41.3-cp310-cp310-win32.whl", hash = "sha256:b0947cd92f782cfc7bb595fd046a5a5c83e9f9524822f071f6b602f08d14b653", size = 1987506, upload-time = "2025-10-13T19:31:01.117Z" }, + { url = "https://files.pythonhosted.org/packages/37/29/8d16b6f88284fe46392034fd20e08fe1228f5ed63726b8f5068cc73f9b46/pydantic_core-2.41.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d972c97e91e294f1ce4c74034211b5c16d91b925c08704f5786e5e3743d8a20", size = 2025386, upload-time = "2025-10-13T19:31:03.055Z" }, + { url = "https://files.pythonhosted.org/packages/47/60/f7291e1264831136917e417b1ec9ed70dd64174a4c8ff4d75cad3028aab5/pydantic_core-2.41.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91dfe6a6e02916fd1fb630f1ebe0c18f9fd9d3cbfe84bb2599f195ebbb0edb9b", size = 2107996, upload-time = "2025-10-13T19:31:04.902Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/362832ea8b890f5821ada95cd72a0da1b2466f88f6ac1a47cf1350136722/pydantic_core-2.41.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e301551c63d46122972ab5523a1438772cdde5d62d34040dac6f11017f18cc5d", size = 1916194, upload-time = "2025-10-13T19:31:06.313Z" }, + { url = "https://files.pythonhosted.org/packages/90/ca/893c63b84ca961d81ae33e4d1e3e00191e29845a874c7f4cc3ca1aa61157/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d986b1defbe27867812dc3d8b3401d72be14449b255081e505046c02687010a", size = 1969065, upload-time = "2025-10-13T19:31:07.719Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/fecd085420a500acbf3bfc542d2662f2b37497f740461b5e960277f199f0/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:351b2c5c073ae8caaa11e4336f8419d844c9b936e123e72dbe2c43fa97e54781", size = 2049849, upload-time = "2025-10-13T19:31:09.166Z" }, + { url = "https://files.pythonhosted.org/packages/26/55/e351b6f51c6b568a911c672c8e3fd809d10f6deaa475007b54e3c0b89f0f/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be34f5217ffc28404fc0ca6f07491a2a6a770faecfcf306384c142bccd2fdb4", size = 2244780, upload-time = "2025-10-13T19:31:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/87873bb56e5055d1aadfd84affa33cbf164e923d674c17ca898ad53db08e/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3cbcad992c281b4960cb5550e218ff39a679c730a59859faa0bc9b8d87efbe6a", size = 2362221, upload-time = "2025-10-13T19:31:13.183Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/2a3fb1e3b5f47754935a726ff77887246804156a029c5394daf4263a3e88/pydantic_core-2.41.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8741b0ab2acdd20c804432e08052791e66cf797afa5451e7e435367f88474b0b", size = 2070695, upload-time = "2025-10-13T19:31:14.849Z" }, + { url = "https://files.pythonhosted.org/packages/78/ac/d66c1048fcd60e995913809f9e3fcca1e6890bc3588902eab9ade63aa6d8/pydantic_core-2.41.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ac3ba94f3be9437da4ad611dacd356f040120668c5b1733b8ae035a13663c48", size = 
2185138, upload-time = "2025-10-13T19:31:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/6fbbd67d0629392ccd5eea8a8b4c005f0151c5505ad22f9b1ff74d63d9f1/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:971efe83bac3d5db781ee1b4836ac2cdd53cf7f727edfd4bb0a18029f9409ef2", size = 2148858, upload-time = "2025-10-13T19:31:18.311Z" }, + { url = "https://files.pythonhosted.org/packages/1c/08/453385212db8db39ed0b6a67f2282b825ad491fed46c88329a0b9d0e543e/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:98c54e5ad0399ac79c0b6b567693d0f8c44b5a0d67539826cc1dd495e47d1307", size = 2315038, upload-time = "2025-10-13T19:31:19.95Z" }, + { url = "https://files.pythonhosted.org/packages/53/b9/271298376dc561de57679a82bf4777b9cf7df23881d487b17f658ef78eab/pydantic_core-2.41.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60110fe616b599c6e057142f2d75873e213bc0cbdac88f58dda8afb27a82f978", size = 2324458, upload-time = "2025-10-13T19:31:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/17/93/126ac22c310a64dc24d833d47bd175098daa3f9eab93043502a2c11348b4/pydantic_core-2.41.3-cp311-cp311-win32.whl", hash = "sha256:75428ae73865ee366f159b68b9281c754df832494419b4eb46b7c3fbdb27756c", size = 1986636, upload-time = "2025-10-13T19:31:23.08Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/703a31dc6ede00b4e394e5b81c14f462fe5654d3064def17dd64d4389a1a/pydantic_core-2.41.3-cp311-cp311-win_amd64.whl", hash = "sha256:c0178ad5e586d3e394f4b642f0bb7a434bcf34d1e9716cc4bd74e34e35283152", size = 2023792, upload-time = "2025-10-13T19:31:25.011Z" }, + { url = "https://files.pythonhosted.org/packages/f4/e3/2166b56df1bbe92663b8971012bf7dbd28b6a95e1dc9ad1ec9c99511c41e/pydantic_core-2.41.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dd40bb57cdae2a35e20d06910b93b13e8f57ffff5a0b0a45927953bad563a03", size = 1968147, upload-time = "2025-10-13T19:31:26.611Z" }, + { url = "https://files.pythonhosted.org/packages/20/11/3149cae2a61ddd11c206cde9dab7598a53cfabe8e69850507876988d2047/pydantic_core-2.41.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bdc8b70bc4b68e4d891b46d018012cac7bbfe3b981a7c874716dde09ff09fd5", size = 2098919, upload-time = "2025-10-13T19:31:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/53/64/1717c7c5b092c64e5022b0d02b11703c2c94c31d897366b6c8d160b7d1de/pydantic_core-2.41.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446361e93f4ffe509edae5862fb89a0d24cbc8f2935f05c6584c2f2ca6e7b6df", size = 1910372, upload-time = "2025-10-13T19:31:30.351Z" }, + { url = "https://files.pythonhosted.org/packages/99/ba/0231b5dde6c1c436e0d58aed7d63f927694d92c51aff739bf692142ce6e6/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af9a9ae24b866ce58462a7de61c33ff035e052b7a9c05c29cf496bd6a16a63f", size = 1952392, upload-time = "2025-10-13T19:31:32.345Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/1adbfa682a56544d70b42931f19de44a4e58a4fc2152da343a2fdfd4cad5/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc836eb8561f04fede7b73747463bd08715be0f55c427e0f0198aa2f1d92f913", size = 2041093, upload-time = "2025-10-13T19:31:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/9d14041f0b125a5d6388957cace43f9dfb80d862e56a0685dde431a20b6a/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16f80f366472eb6a3744149289c263e5ef182c8b18422192166b67625fef3c50", size = 
2214331, upload-time = "2025-10-13T19:31:36.575Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/384988d065596fafecf9baeab0c66ef31610013b26eec3b305a80ab5f669/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d699904cd13d0f509bdbb17f0784abb332d4aa42df4b0a8b65932096fcd4b21", size = 2344450, upload-time = "2025-10-13T19:31:38.905Z" }, + { url = "https://files.pythonhosted.org/packages/a3/13/1b0dd34fce51a746823a347d7f9e02c6ea09078ec91c5f656594c23d2047/pydantic_core-2.41.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485398dacc5dddb2be280fd3998367531eccae8631f4985d048c2406a5ee5ecc", size = 2070507, upload-time = "2025-10-13T19:31:41.093Z" }, + { url = "https://files.pythonhosted.org/packages/29/a6/0f8d6d67d917318d842fe8dba2489b0c5989ce01fc1ed58bf204f80663df/pydantic_core-2.41.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dfe0898272bf675941cd1ea701677341357b77acadacabbd43d71e09763dceb", size = 2185401, upload-time = "2025-10-13T19:31:42.785Z" }, + { url = "https://files.pythonhosted.org/packages/e9/23/b8a82253736f2efd3b79338dfe53866b341b68868fbce7111ff6b040b680/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:86ffbf5291c367a56b5718590dc3452890f2c1ac7b76d8f4a1e66df90bd717f6", size = 2131929, upload-time = "2025-10-13T19:31:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/7c/16/efe252cbf852ebfcb4978820e7681d83ae45c526cbfc0cf847f70de49850/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:c58c5acda77802eedde3aaf22be09e37cfec060696da64bf6e6ffb2480fdabd0", size = 2307223, upload-time = "2025-10-13T19:31:48.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ea/7d8eba2c37769d8768871575be449390beb2452a2289b0090ea7fa63f920/pydantic_core-2.41.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40db5705aec66371ca5792415c3e869137ae2bab48c48608db3f84986ccaf016", size = 2312962, upload-time = "2025-10-13T19:31:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/02/c4/b617e33c3b6f4a99c7d252cc42df958d14627a09a1a935141fb9abe44189/pydantic_core-2.41.3-cp312-cp312-win32.whl", hash = "sha256:668fcb317a0b3c84781796891128111c32f83458d436b022014ed0ea07f66e1b", size = 1988735, upload-time = "2025-10-13T19:31:51.778Z" }, + { url = "https://files.pythonhosted.org/packages/24/fc/05bb0249782893b52baa7732393c0bac9422d6aab46770253f57176cddba/pydantic_core-2.41.3-cp312-cp312-win_amd64.whl", hash = "sha256:248a5d1dac5382454927edf32660d0791d2df997b23b06a8cac6e3375bc79cee", size = 2032239, upload-time = "2025-10-13T19:31:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/75/1d/7637f6aaafdbc27205296bde9843096bd449192986b5523869444f844b82/pydantic_core-2.41.3-cp312-cp312-win_arm64.whl", hash = "sha256:347a23094c98b7ea2ba6fff93b52bd2931a48c9c1790722d9e841f30e4b7afcd", size = 1969072, upload-time = "2025-10-13T19:31:55.7Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a6/7533cba20b8b66e209d8d2acbb9ccc0bc1b883b0654776d676e02696ef5d/pydantic_core-2.41.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a8596700fdd3ee12b0d9c1f2395f4c32557e7ebfbfacdc08055b0bcbe7d2827e", size = 2105686, upload-time = "2025-10-13T19:31:57.675Z" }, + { url = "https://files.pythonhosted.org/packages/84/d7/2d15cb9dfb9f94422fb4a8820cbfeb397e3823087c2361ef46df5c172000/pydantic_core-2.41.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624503f918e472c0eed6935020c01b6a6b4bcdb7955a848da5c8805d40f15c0f", size = 1910554, upload-time 
= "2025-10-13T19:32:00.037Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/cbd1caa19e88fd64df716a37b49e5864c1ac27dbb9eb870b8977a584fa42/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36388958d0c614df9f5de1a5f88f4b79359016b9ecdfc352037788a628616aa2", size = 1957559, upload-time = "2025-10-13T19:32:02.603Z" }, + { url = "https://files.pythonhosted.org/packages/3b/fe/da942ae51f602173556c627304dc24b9fa8bd04423bce189bf397ba0419e/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c50eba144add9104cf43ef9a3d81c37ebf48bfd0924b584b78ec2e03ec91daf", size = 2051084, upload-time = "2025-10-13T19:32:05.056Z" }, + { url = "https://files.pythonhosted.org/packages/c8/62/0abd59a7107d1ef502b9cfab68145c6bb87115c2d9e883afbf18b98fe6db/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ea2102958eb5ad560d570c49996e215a6939d9bffd0e9fd3b9e808a55008cc", size = 2218098, upload-time = "2025-10-13T19:32:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/72/b1/93a36aa119b70126f3f0d06b6f9a81ca864115962669d8a85deb39c82ecc/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd0d26f1e4335d5f84abfc880da0afa080c8222410482f9ee12043bb05f55ec8", size = 2341954, upload-time = "2025-10-13T19:32:08.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/be/7c2563b53b71ff3e41950b0ffa9eeba3d702091c6d59036fff8a39050528/pydantic_core-2.41.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41c38700094045b12c0cff35c8585954de66cf6dd63909fed1c2e6b8f38e1e1e", size = 2069474, upload-time = "2025-10-13T19:32:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ac/2394004db9f6e03712c1e52f40f0979750fa87721f6baf5f76ad92b8be46/pydantic_core-2.41.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4061cc82d7177417fdb90e23e67b27425ecde2652cfd2053b5b4661a489ddc19", size = 2190633, upload-time = "2025-10-13T19:32:12.731Z" }, + { url = "https://files.pythonhosted.org/packages/7d/31/7b70c2d1fe41f450f8022f5523edaaea19c17a2d321fab03efd03aea1fe8/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b1d9699a4dae10a7719951cca1e30b591ef1dd9cdda9fec39282a283576c0241", size = 2137097, upload-time = "2025-10-13T19:32:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ae/f872198cffc8564f52c4ef83bcd3e324e5ac914e168c6b812f5ce3f80aab/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d5099f1b97e79f0e45cb6a236a5bd1a20078ed50b1b28f3d17f6c83ff3585baa", size = 2316771, upload-time = "2025-10-13T19:32:16.586Z" }, + { url = "https://files.pythonhosted.org/packages/23/50/f0fce3a9a7554ced178d943e1eada58b15fca896e9eb75d50244fc12007c/pydantic_core-2.41.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b5ff0467a8c1b6abb0ab9c9ea80e2e3a9788592e44c726c2db33fdaf1b5e7d0b", size = 2319449, upload-time = "2025-10-13T19:32:18.503Z" }, + { url = "https://files.pythonhosted.org/packages/15/1f/86a6948408e8388604c02ffde651a2e39b711bd1ab6eeaff376094553a10/pydantic_core-2.41.3-cp313-cp313-win32.whl", hash = "sha256:edfe9b4cee4a91da7247c25732f24504071f3e101c050694d18194b7d2d320bf", size = 1995352, upload-time = "2025-10-13T19:32:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/6dac37c3f62684dc459a31623d8ae97ee433fd68bb827e5c64dd831a5087/pydantic_core-2.41.3-cp313-cp313-win_amd64.whl", hash = 
"sha256:44af3276c0c2c14efde6590523e4d7e04bcd0e46e0134f0dbef1be0b64b2d3e3", size = 2031894, upload-time = "2025-10-13T19:32:23.11Z" }, + { url = "https://files.pythonhosted.org/packages/fd/75/3d9ba041a3fcb147279fbb37d2468efe62606809fec97b8de78174335ef4/pydantic_core-2.41.3-cp313-cp313-win_arm64.whl", hash = "sha256:59aeed341f92440d51fdcc82c8e930cfb234f1843ed1d4ae1074f5fb9789a64b", size = 1974036, upload-time = "2025-10-13T19:32:25.219Z" }, + { url = "https://files.pythonhosted.org/packages/50/68/45842628ccdb384df029f884ef915306d195c4f08b66ca4d99867edc6338/pydantic_core-2.41.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ef37228238b3a280170ac43a010835c4a7005742bc8831c2c1a9560de4595dbe", size = 1876856, upload-time = "2025-10-13T19:32:27.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/73/336a82910c6a482a0ba9a255c08dcc456ebca9735df96d7a82dffe17626a/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cb19f36253152c509abe76c1d1b185436e0c75f392a82934fe37f4a1264449", size = 1884665, upload-time = "2025-10-13T19:32:29.567Z" }, + { url = "https://files.pythonhosted.org/packages/34/87/ec610a7849561e0ef7c25b74ef934d154454c3aac8fb595b899557f3c6ab/pydantic_core-2.41.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91be4756e05367ce19a70e1db3b77f01f9e40ca70d26fb4cdfa993e53a08964a", size = 2043067, upload-time = "2025-10-13T19:32:31.506Z" }, + { url = "https://files.pythonhosted.org/packages/db/b4/5f2b0cf78752f9111177423bd5f2bc0815129e587c13401636b8900a417e/pydantic_core-2.41.3-cp313-cp313t-win_amd64.whl", hash = "sha256:ce7d8f4353f82259b55055bd162bbaf599f6c40cd0c098e989eeb95f9fdc022f", size = 1996799, upload-time = "2025-10-13T19:32:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/07e7f19a6a44a52abd48846e348e11fa1b3de5ed7c0231d53f055ffb365f/pydantic_core-2.41.3-cp313-cp313t-win_arm64.whl", hash = "sha256:f06a9e81da60e5a0ef584f6f4790f925c203880ae391bf363d97126fd1790b21", size = 1969574, upload-time = "2025-10-13T19:32:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/db32fbced75853c1d8e7ada8cb2b837ade99b2f281de569908de3e29f0bf/pydantic_core-2.41.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:0c77e8e72344e34052ea26905fa7551ecb75fc12795ca1a8e44f816918f4c718", size = 2103383, upload-time = "2025-10-13T19:32:37.522Z" }, + { url = "https://files.pythonhosted.org/packages/de/28/5bcb3327b3777994633f4cb459c5dc34a9cbe6cf0ac449d3e8f1e74bdaaa/pydantic_core-2.41.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:32be442a017e82a6c496a52ef5db5f5ac9abf31c3064f5240ee15a1d27cc599e", size = 1904974, upload-time = "2025-10-13T19:32:39.513Z" }, + { url = "https://files.pythonhosted.org/packages/71/8d/c9d8cad7c02d63869079fb6fb61b8ab27adbeeda0bf130c684fe43daa126/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af10c78f0e9086d2d883ddd5a6482a613ad435eb5739cf1467b1f86169e63d91", size = 1956879, upload-time = "2025-10-13T19:32:41.849Z" }, + { url = "https://files.pythonhosted.org/packages/15/b1/8a84b55631a45375a467df288d8f905bec0abadb1e75bce3b32402b49733/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6212874118704e27d177acee5b90b83556b14b2eb88aae01bae51cd9efe27019", size = 2051787, upload-time = "2025-10-13T19:32:43.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/97/a84ea9cb7ba4dbfd43865e5dd536b22c78ee763d82d501c6f6a553403c00/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6a24c82674a3a8e7f7306e57e98219e5c1cdfc0f57bc70986930dda136230b2", size = 2217830, upload-time = "2025-10-13T19:32:46.053Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2c/64233c77410e314dbb7f2e8112be7f56de57cf64198a32d8ab3f7b74adf4/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e0c81dc047c18059410c959a437540abcefea6a882d6e43b9bf45c291eaacd9", size = 2341131, upload-time = "2025-10-13T19:32:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/23/3d/915b90eb0de93bd522b293fd1a986289f5d576c72e640f3bb426b496d095/pydantic_core-2.41.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0d7e1a9f80f00a8180b9194ecef66958eb03f3c3ae2d77195c9d665ac0a61e", size = 2063797, upload-time = "2025-10-13T19:32:50.458Z" }, + { url = "https://files.pythonhosted.org/packages/4d/25/a65665caa86e496e19feef48e6bd9263c1a46f222e8f9b0818f67bd98dc3/pydantic_core-2.41.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2868fabfc35ec0738539ce0d79aab37aeffdcb9682b9b91f0ac4b0ba31abb1eb", size = 2193041, upload-time = "2025-10-13T19:32:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/a7f7e17f99ee691a7d93a53aa41bf7d1b1d425945b6e9bc8020498a413e1/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:cb4f40c93307e1c50996e4edcddf338e1f3f1fb86fb69b654111c6050ae3b081", size = 2136119, upload-time = "2025-10-13T19:32:54.737Z" }, + { url = "https://files.pythonhosted.org/packages/5f/92/c27c1f3edd06e04af71358aa8f4d244c8bc6726e3fb47e00157d3dffe66f/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:287cbcd3407a875eaf0b1efa2e5288493d5b79bfd3629459cf0b329ad8a9071a", size = 2317223, upload-time = "2025-10-13T19:32:56.927Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/20aabe3c32888fb13d4726e405716fed14b1d4d1d4292d585862c1458b7b/pydantic_core-2.41.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:5253835aa145049205a67056884555a936f9b3fea7c3ce860bff62be6a1ae4d1", size = 2320425, upload-time = "2025-10-13T19:32:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/67/d2/476d4bc6b3070e151ae920167f27f26415e12f8fcc6cf5a47a613aba7267/pydantic_core-2.41.3-cp314-cp314-win32.whl", hash = "sha256:69297795efe5349156d18eebea818b75d29a1d3d1d5f26a250f22ab4220aacd6", size = 1994216, upload-time = "2025-10-13T19:33:01.484Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/2cd8515584b3d665ca3c4d946364c2a9932d0d5648694c2a10d273cde81c/pydantic_core-2.41.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1c133e3447c2f6d95e47ede58fff0053370758112a1d39117d0af8c93584049", size = 2026522, upload-time = "2025-10-13T19:33:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/c9f2791d7188594f0abdc1b7fe8ec3efc123ee2d9c553fd3b6da2d9fd53d/pydantic_core-2.41.3-cp314-cp314-win_arm64.whl", hash = "sha256:54534eecbb7a331521f832e15fc307296f491ee1918dacfd4d5b900da6ee3332", size = 1969070, upload-time = "2025-10-13T19:33:05.604Z" }, + { url = "https://files.pythonhosted.org/packages/b5/eb/45f9a91f8c09f4cfb62f78dce909b20b6047ce4fd8d89310fcac5ad62e54/pydantic_core-2.41.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6b4be10152098b43c093a4b5e9e9da1ac7a1c954c1934d4438d07ba7b7bcf293", size = 1876593, upload-time = "2025-10-13T19:33:07.814Z" }, + 
{ url = "https://files.pythonhosted.org/packages/99/f8/5c9d0959e0e1f260eea297a5ecc1dc29a14e03ee6a533e805407e8403c1a/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe4ebd676c158a7994253161151b476dbbef2acbd2f547cfcfdf332cf67cc29", size = 1882977, upload-time = "2025-10-13T19:33:10.109Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/7ab918e35f55e7beee471ba8c67dfc4c9c19a8904e4867bfda7f9c76a72e/pydantic_core-2.41.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984ca0113b39dda1d7c358d6db03dd6539ef244d0558351806c1327239e035bf", size = 2041033, upload-time = "2025-10-13T19:33:12.216Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c8/5b12e5a36410ebcd0082ae5b0258150d72762e306f298cc3fe731b5574ec/pydantic_core-2.41.3-cp314-cp314t-win_amd64.whl", hash = "sha256:2a7dd8a6f5a9a2f8c7f36e4fc0982a985dbc4ac7176ee3df9f63179b7295b626", size = 1994462, upload-time = "2025-10-13T19:33:14.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f6/c6f3b7244a2a0524f4a04052e3d590d3be0ba82eb1a2f0fe5d068237701e/pydantic_core-2.41.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b387f08b378924fa82bd86e03c9d61d6daca1a73ffb3947bdcfe12ea14c41f68", size = 1973551, upload-time = "2025-10-13T19:33:16.87Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/837dc1d5f09728590ace987fcaad83ec4539dcd73ce4ea5a0b786ee0a921/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:98ad9402d6cc194b21adb4626ead88fcce8bc287ef434502dbb4d5b71bdb9a47", size = 2122049, upload-time = "2025-10-13T19:33:49.808Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/d9c6d70571219d826381049df60188777de0283d7f01077bfb7ec26cb121/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:539b1c01251fbc0789ad4e1dccf3e888062dd342b2796f403406855498afbc36", size = 1936957, upload-time = "2025-10-13T19:33:52.768Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d3/5e69eba2752a47815adcf9ff7fcfdb81c600b7c87823037d8e746db835cf/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12019e3a4ded7c4e84b11a761be843dfa9837444a1d7f621888ad499f0f72643", size = 1957032, upload-time = "2025-10-13T19:33:55.46Z" }, + { url = "https://files.pythonhosted.org/packages/4c/98/799db4be56a16fb22152c5473f806c7bb818115f1648bee3ac29a7d5fb9e/pydantic_core-2.41.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e01519c8322a489167abb1aceaab1a9e4c7d3e665dc3f7b0b1355910fcb698", size = 2140010, upload-time = "2025-10-13T19:33:57.881Z" }, + { url = "https://files.pythonhosted.org/packages/68/e6/a41dec3d50cfbd7445334459e847f97a62c5658d2c6da268886928ffd357/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:a6ded5abbb7391c0db9e002aaa5f0e3a49a024b0a22e2ed09ab69087fd5ab8a8", size = 2112077, upload-time = "2025-10-13T19:34:00.77Z" }, + { url = "https://files.pythonhosted.org/packages/44/38/e136a52ae85265a07999439cd8dcd24ba4e83e23d61e40000cd74b426f19/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:43abc869cce9104ff35cb4eff3028e9a87346c95fe44e0173036bf4d782bdc3d", size = 1920464, upload-time = "2025-10-13T19:34:03.454Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/5d/a3f509f682818ded836bd006adce08d731d81c77694a26a0a1a448f3e351/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb3c63f4014a603caee687cd5c3c63298d2c8951b7acb2ccd0befbf2e1c0b8ad", size = 1951926, upload-time = "2025-10-13T19:34:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/cb30ad2a0147cc7763c0c805ee1c534f6ed5d5db7bc8cf8ebaf34b4c9dab/pydantic_core-2.41.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88461e25f62e58db4d8b180e2612684f31b5844db0a8f8c1c421498c97bc197b", size = 2139233, upload-time = "2025-10-13T19:34:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/61/39/92380b350c0f22ae2c8ca11acc8b45ac39de55b8b750680459527e224d86/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:219a95d7638c6b3a50de749747afdf1c2bdf027653e4a3e1df2fefa1e238d8eb", size = 2108918, upload-time = "2025-10-13T19:34:10.79Z" }, + { url = "https://files.pythonhosted.org/packages/bf/94/683a4efcbd1c890b88d6898a46e537b443eaf157bf78fb44f47a2474d47a/pydantic_core-2.41.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21d4e730b75cfc62b3e24261030bd223ed5f867039f971027c551a7ab911f460", size = 1930618, upload-time = "2025-10-13T19:34:13.226Z" }, + { url = "https://files.pythonhosted.org/packages/38/b4/44a6ce874bc629a0a4a42a0370955ff46b2db302bfcd895d69b28e73372a/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d9a98a80309189a49cffcd507c85032a2df35d005bd12d655f425ca80eec3d", size = 2135930, upload-time = "2025-10-13T19:34:15.592Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/1bf4ad96b1679e0889c21707c767f0b2a5910413b2587ea830eee620c74c/pydantic_core-2.41.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f7d53153eb2a5c2f7a8cccf1a45022e2b75668cad274f998b43313da03053d", size = 2182112, upload-time = "2025-10-13T19:34:18.209Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ed/6c39d1ba28b00459baa452629d6cdf3fbbfd40d774655a6c15b8af3b7312/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e2135eff48d3b6a2abfe7b26395d350ea76a460d3de3cf2521fe2f15f222fa29", size = 2146549, upload-time = "2025-10-13T19:34:20.652Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fd/550a234486e69682311f060be25c2355fd28434d4506767a729a7902ee2d/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:005bf20e48f6272803de8ba0be076e5bd7d015b7f02ebcc989bc24f85636d1d8", size = 2311299, upload-time = "2025-10-13T19:34:23.097Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5c/61cb3ad96dcba2fe4c5a618c9ad30661077da22fdae190c4aefbee5a1cc3/pydantic_core-2.41.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d4ebfa1864046c44669cd789a613ec39ee194fe73842e369d129d716730216d9", size = 2321969, upload-time = "2025-10-13T19:34:25.52Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/6b10a391feb74d2ff21b5597a632f7f9ad50afe3a9bfe1de0a1b10aee0cb/pydantic_core-2.41.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cb82cd643a2ad7ebf94bdb7fa6c339801b0fe8c7920610d6da7b691647ef5842", size = 2150346, upload-time = "2025-10-13T19:34:28.101Z" }, + { url = "https://files.pythonhosted.org/packages/1d/84/14c7ed3428feb718792fc2ecc5d04c12e46cb5c65620717c6826428ee468/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:5e67f86ffb40127851dba662b2d0ab400264ed37cfedeab6100515df41ccb325", size = 2106894, upload-time = "2025-10-13T19:34:30.905Z" }, + { url = "https://files.pythonhosted.org/packages/ea/5d/d129794fc3990a49b12963d7cc25afc6a458fe85221b8a78cf46c5f22135/pydantic_core-2.41.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ecad4d7d264f6df23db68ca3024919a7aab34b4c44d9a9280952863a7a0c5e81", size = 1929911, upload-time = "2025-10-13T19:34:33.399Z" }, + { url = "https://files.pythonhosted.org/packages/d3/89/8fe254b1725a48f4da1978fa21268f142846c2d653715161afc394e67486/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fce6e6505b9807d3c20476fa016d0bd4d54a858fe648d6f5ef065286410c3da7", size = 2133972, upload-time = "2025-10-13T19:34:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/75/26/eefc7f23167a8060e29fcbb99d15158729ea794ee5b5c11ecc4df73b21c9/pydantic_core-2.41.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05974468cff84ea112ad4992823f1300d822ad51df0eba4c3af3c4a4cbe5eca0", size = 2181777, upload-time = "2025-10-13T19:34:38.762Z" }, + { url = "https://files.pythonhosted.org/packages/67/ba/03c5a00a9251fc5fe22d5807bc52cf0863b9486f0086a45094adee77fa0b/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:091d3966dc2379e07b45b4fd9651fbab5b24ea3c62cc40637beaf691695e5f5a", size = 2144699, upload-time = "2025-10-13T19:34:41.29Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4e/ee90dc6c99c8261c89ce1c2311395e7a0432dfc20db1bd6d9be917a92320/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:16f216e4371a05ad3baa5aed152eae056c7e724663c2bcbb38edd607c17baa89", size = 2311388, upload-time = "2025-10-13T19:34:43.843Z" }, + { url = "https://files.pythonhosted.org/packages/f5/01/7f3e4ed3963113e5e9df8077f3015facae0cd3a65ac5688d308010405a0e/pydantic_core-2.41.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2e169371f88113c8e642f7ac42c798109f1270832b577b5144962a7a028bfb0c", size = 2320916, upload-time = "2025-10-13T19:34:46.417Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d7/91ef73afa5c275962edd708559148e153d95866f8baf96142ab4804da67a/pydantic_core-2.41.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:83847aa6026fb7149b9ef06e10c73ff83ac1d2aa478b28caa4f050670c1c9a37", size = 2148327, upload-time = "2025-10-13T19:34:48.929Z" }, ] [[package]] diff --git a/libs/text-splitters/README.md b/libs/text-splitters/README.md index f35977099a1..293eebeca66 100644 --- a/libs/text-splitters/README.md +++ b/libs/text-splitters/README.md @@ -1,7 +1,11 @@ # πŸ¦œβœ‚οΈ LangChain Text Splitters -[![PyPI - License](https://img.shields.io/pypi/l/langchain-text-splitters?style=flat-square)](https://opensource.org/licenses/MIT) +[![PyPI - Version](https://img.shields.io/pypi/v/langchain-text-splitters?label=%20)](https://pypi.org/project/langchain-text-splitters/#history) +[![PyPI - License](https://img.shields.io/pypi/l/langchain-text-splitters)](https://opensource.org/licenses/MIT) [![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-text-splitters)](https://pypistats.org/packages/langchain-text-splitters) +[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) + +Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs). 
## Quick Install @@ -9,28 +13,24 @@ pip install langchain-text-splitters ``` -## What is it? +## πŸ€” What is this? LangChain Text Splitters contains utilities for splitting into chunks a wide variety of text documents. -For full documentation see the [API reference](https://python.langchain.com/api_reference/text_splitters/index.html) -and the [Text Splitters](https://python.langchain.com/docs/how_to/#text-splitters) module in the main docs. +## πŸ“– Documentation + +For full documentation, see the [API reference](https://reference.langchain.com/python/langchain_text_splitters/). ## πŸ“• Releases & Versioning -Minor version increases will occur for: +See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning](https://docs.langchain.com/oss/python/versioning) policies. -- Breaking changes for any public interfaces NOT marked `beta` +We encourage pinning your version to a specific version in order to avoid breaking your CI when we publish new tests. We recommend upgrading to the latest version periodically to make sure you have the latest tests. -Patch version increases will occur for: - -- Bug fixes -- New features -- Any changes to private interfaces -- Any changes to `beta` features +Not pinning your version will ensure you always have the latest tests, but it may also break your CI if we introduce tests that your integration doesn't pass. ## πŸ’ Contributing As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. -For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing). +For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview). diff --git a/libs/text-splitters/extended_testing_deps.txt b/libs/text-splitters/extended_testing_deps.txt index 8d45ad3ea8b..86de80a23ee 100644 --- a/libs/text-splitters/extended_testing_deps.txt +++ b/libs/text-splitters/extended_testing_deps.txt @@ -1,2 +1,2 @@ -lxml>=4.9.3,<6.0 +lxml>=4.9.3,<7.0 beautifulsoup4>=4.12.3,<5 diff --git a/libs/text-splitters/langchain_text_splitters/__init__.py b/libs/text-splitters/langchain_text_splitters/__init__.py index 828319c852d..623582f8d91 100644 --- a/libs/text-splitters/langchain_text_splitters/__init__.py +++ b/libs/text-splitters/langchain_text_splitters/__init__.py @@ -1,8 +1,8 @@ -"""**Text Splitters** are classes for splitting text. +"""Text Splitters are classes for splitting text. !!! note - **MarkdownHeaderTextSplitter** and **HTMLHeaderTextSplitter do not derive from - TextSplitter. + `MarkdownHeaderTextSplitter` and `HTMLHeaderTextSplitter` do not derive from + `TextSplitter`. 
""" from langchain_text_splitters.base import ( diff --git a/libs/text-splitters/langchain_text_splitters/base.py b/libs/text-splitters/langchain_text_splitters/base.py index 21d2a06c48c..ed06b78a498 100644 --- a/libs/text-splitters/langchain_text_splitters/base.py +++ b/libs/text-splitters/langchain_text_splitters/base.py @@ -60,10 +60,10 @@ class TextSplitter(BaseDocumentTransformer, ABC): chunk_overlap: Overlap in characters between chunks length_function: Function that measures the length of given chunks keep_separator: Whether to keep the separator and where to place it - in each corresponding chunk (True='start') + in each corresponding chunk `(True='start')` add_start_index: If `True`, includes chunk's start index in metadata strip_whitespace: If `True`, strips whitespace from the start and end of - every document + every document """ if chunk_size <= 0: msg = f"chunk_size must be > 0, got {chunk_size}" @@ -91,7 +91,7 @@ class TextSplitter(BaseDocumentTransformer, ABC): def create_documents( self, texts: list[str], metadatas: list[dict[Any, Any]] | None = None ) -> list[Document]: - """Create documents from a list of texts.""" + """Create a list of `Document` objects from a list of texts.""" metadatas_ = metadatas or [{}] * len(texts) documents = [] for i, text in enumerate(texts): @@ -170,7 +170,7 @@ class TextSplitter(BaseDocumentTransformer, ABC): def from_huggingface_tokenizer( cls, tokenizer: PreTrainedTokenizerBase, **kwargs: Any ) -> TextSplitter: - """Text splitter that uses HuggingFace tokenizer to count length.""" + """Text splitter that uses Hugging Face tokenizer to count length.""" if not _HAS_TRANSFORMERS: msg = ( "Could not import transformers python package. " @@ -196,7 +196,7 @@ class TextSplitter(BaseDocumentTransformer, ABC): disallowed_special: Literal["all"] | Collection[str] = "all", **kwargs: Any, ) -> Self: - """Text splitter that uses tiktoken encoder to count length.""" + """Text splitter that uses `tiktoken` encoder to count length.""" if not _HAS_TIKTOKEN: msg = ( "Could not import tiktoken python package. " @@ -276,11 +276,11 @@ class TokenTextSplitter(TextSplitter): `split_text_on_tokens` function. Args: - text (str): The input text to be split into smaller chunks. + text: The input text to be split into smaller chunks. Returns: - List[str]: A list of text chunks, where each chunk is derived from a portion - of the input text based on the tokenization and chunking rules. + A list of text chunks, where each chunk is derived from a portion + of the input text based on the tokenization and chunking rules. 
""" def _encode(_text: str) -> list[int]: @@ -341,9 +341,9 @@ class Tokenizer: tokens_per_chunk: int """Maximum number of tokens per chunk""" decode: Callable[[list[int]], str] - """ Function to decode a list of token ids to a string""" + """ Function to decode a list of token IDs to a string""" encode: Callable[[str], list[int]] - """ Function to encode a string to a list of token ids""" + """ Function to encode a string to a list of token IDs""" def split_text_on_tokens(*, text: str, tokenizer: Tokenizer) -> list[str]: diff --git a/libs/text-splitters/langchain_text_splitters/character.py b/libs/text-splitters/langchain_text_splitters/character.py index 9a073520e4a..22736f1df58 100644 --- a/libs/text-splitters/langchain_text_splitters/character.py +++ b/libs/text-splitters/langchain_text_splitters/character.py @@ -143,10 +143,10 @@ class RecursiveCharacterTextSplitter(TextSplitter): """Split the input text into smaller chunks based on predefined separators. Args: - text (str): The input text to be split. + text: The input text to be split. Returns: - List[str]: A list of text chunks obtained after splitting. + A list of text chunks obtained after splitting. """ return self._split_text(text, self._separators) @@ -159,12 +159,11 @@ class RecursiveCharacterTextSplitter(TextSplitter): This method initializes the text splitter with language-specific separators. Args: - language (Language): The language to configure the text splitter for. - **kwargs (Any): Additional keyword arguments to customize the splitter. + language: The language to configure the text splitter for. + **kwargs: Additional keyword arguments to customize the splitter. Returns: - RecursiveCharacterTextSplitter: An instance of the text splitter configured - for the specified language. + An instance of the text splitter configured for the specified language. """ separators = cls.get_separators_for_language(language) return cls(separators=separators, is_separator_regex=True, **kwargs) @@ -174,10 +173,10 @@ class RecursiveCharacterTextSplitter(TextSplitter): """Retrieve a list of separators specific to the given language. Args: - language (Language): The language for which to get the separators. + language: The language for which to get the separators. Returns: - List[str]: A list of separators appropriate for the specified language. + A list of separators appropriate for the specified language. """ if language in {Language.C, Language.CPP}: return [ diff --git a/libs/text-splitters/langchain_text_splitters/html.py b/libs/text-splitters/langchain_text_splitters/html.py index 179b110ab9c..2eee21a3b21 100644 --- a/libs/text-splitters/langchain_text_splitters/html.py +++ b/libs/text-splitters/langchain_text_splitters/html.py @@ -84,17 +84,17 @@ def _find_all_tags( class HTMLHeaderTextSplitter: """Split HTML content into structured Documents based on specified headers. - Splits HTML content by detecting specified header tags (e.g.,

<h1>, <h2>
) and - creating hierarchical Document objects that reflect the semantic structure - of the original content. For each identified section, the splitter associates - the extracted text with metadata corresponding to the encountered headers. + Splits HTML content by detecting specified header tags and creating hierarchical + `Document` objects that reflect the semantic structure of the original content. For + each identified section, the splitter associates the extracted text with metadata + corresponding to the encountered headers. If no specified headers are found, the entire content is returned as a single - Document. This allows for flexible handling of HTML input, ensuring that + `Document`. This allows for flexible handling of HTML input, ensuring that information is organized according to its semantic headers. The splitter provides the option to return each HTML element as a separate - Document or aggregate them into semantically meaningful chunks. It also + `Document` or aggregate them into semantically meaningful chunks. It also gracefully handles multiple levels of nested headers, creating a rich, hierarchical representation of the content. @@ -151,15 +151,15 @@ class HTMLHeaderTextSplitter: """Initialize with headers to split on. Args: - headers_to_split_on: A list of (header_tag, - header_name) pairs representing the headers that define splitting - boundaries. For example, [("h1", "Header 1"), ("h2", "Header 2")] - will split content by

<h1> and <h2>
tags, assigning their textual - content to the Document metadata. + headers_to_split_on: A list of `(header_tag, + header_name)` pairs representing the headers that define splitting + boundaries. For example, `[("h1", "Header 1"), ("h2", "Header 2")]` + will split content by `h1` and `h2` tags, assigning their textual + content to the `Document` metadata. return_each_element: If `True`, every HTML element encountered (including headers, paragraphs, etc.) is returned as a separate - Document. If `False`, content under the same header hierarchy is - aggregated into fewer Documents. + `Document`. If `False`, content under the same header hierarchy is + aggregated into fewer `Document` objects. """ # Sort headers by their numeric level so that h1 < h2 < h3... self.headers_to_split_on = sorted( @@ -170,15 +170,15 @@ class HTMLHeaderTextSplitter: self.return_each_element = return_each_element def split_text(self, text: str) -> list[Document]: - """Split the given text into a list of Document objects. + """Split the given text into a list of `Document` objects. Args: text: The HTML text to split. Returns: - A list of split Document objects. Each Document contains - `page_content` holding the extracted text and `metadata` that maps - the header hierarchy to their corresponding titles. + A list of split Document objects. Each `Document` contains + `page_content` holding the extracted text and `metadata` that maps + the header hierarchy to their corresponding titles. """ return self.split_text_from_file(StringIO(text)) @@ -189,13 +189,13 @@ class HTMLHeaderTextSplitter: Args: url: The URL to fetch content from. - timeout: Timeout for the request. Defaults to 10. + timeout: Timeout for the request. **kwargs: Additional keyword arguments for the request. Returns: - A list of split Document objects. Each Document contains - `page_content` holding the extracted text and `metadata` that maps - the header hierarchy to their corresponding titles. + A list of split Document objects. Each `Document` contains + `page_content` holding the extracted text and `metadata` that maps + the header hierarchy to their corresponding titles. Raises: requests.RequestException: If the HTTP request fails. @@ -205,15 +205,15 @@ class HTMLHeaderTextSplitter: return self.split_text(response.text) def split_text_from_file(self, file: str | IO[str]) -> list[Document]: - """Split HTML content from a file into a list of Document objects. + """Split HTML content from a file into a list of `Document` objects. Args: file: A file path or a file-like object containing HTML content. Returns: - A list of split Document objects. Each Document contains - `page_content` holding the extracted text and `metadata` that maps - the header hierarchy to their corresponding titles. + A list of split Document objects. Each `Document` contains + `page_content` holding the extracted text and `metadata` that maps + the header hierarchy to their corresponding titles. """ if isinstance(file, str): html_content = pathlib.Path(file).read_text(encoding="utf-8") @@ -339,7 +339,7 @@ class HTMLHeaderTextSplitter: class HTMLSectionSplitter: """Splitting HTML files based on specified tag and font sizes. - Requires lxml package. + Requires `lxml` package. """ def __init__( @@ -347,13 +347,13 @@ class HTMLSectionSplitter: headers_to_split_on: list[tuple[str, str]], **kwargs: Any, ) -> None: - """Create a new HTMLSectionSplitter. + """Create a new `HTMLSectionSplitter`. 
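Relating back to the `(header_tag, header_name)` pairs documented above for `HTMLHeaderTextSplitter`, a minimal sketch (the HTML snippet is invented; assumes the optional HTML parsing dependencies such as `beautifulsoup4` are installed):

```python
from langchain_text_splitters import HTMLHeaderTextSplitter

splitter = HTMLHeaderTextSplitter(
    headers_to_split_on=[("h1", "Header 1"), ("h2", "Header 2")]
)

html = (
    "<html><body>"
    "<h1>Intro</h1><p>Opening paragraph.</p>"
    "<h2>Details</h2><p>More specific content.</p>"
    "</body></html>"
)

for doc in splitter.split_text(html):
    # Each Document carries the matched header hierarchy in its metadata.
    print(doc.metadata, "->", doc.page_content)
```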
Args: headers_to_split_on: list of tuples of headers we want to track mapped to - (arbitrary) keys for metadata. Allowed header values: h1, h2, h3, h4, - h5, h6 e.g. [("h1", "Header 1"), ("h2", "Header 2"]. - **kwargs (Any): Additional optional arguments for customizations. + (arbitrary) keys for metadata. Allowed header values: `h1`, `h2`, `h3`, + `h4`, `h5`, `h6` e.g. `[("h1", "Header 1"), ("h2", "Header 2"]`. + **kwargs: Additional optional arguments for customizations. """ self.headers_to_split_on = dict(headers_to_split_on) @@ -385,7 +385,7 @@ class HTMLSectionSplitter: def create_documents( self, texts: list[str], metadatas: list[dict[Any, Any]] | None = None ) -> list[Document]: - """Create documents from a list of texts.""" + """Create a list of `Document` objects from a list of texts.""" metadatas_ = metadatas or [{}] * len(texts) documents = [] for i, text in enumerate(texts): @@ -408,14 +408,14 @@ class HTMLSectionSplitter: contains the header text, content under the header, and the tag name. Args: - html_doc (str): The HTML document to be split into sections. + html_doc: The HTML document to be split into sections. Returns: A list of dictionaries representing sections. Each dictionary contains: - * 'header': The header text or a default title for the first section. - * 'content': The content under the header. - * 'tag_name': The name of the header tag (e.g., "h1", "h2"). + * `'header'`: The header text or a default title for the first section. + * `'content'`: The content under the header. + * `'tag_name'`: The name of the header tag (e.g., `h1`, `h2`). """ if not _HAS_BS4: msg = "Unable to import BeautifulSoup/PageElement, \ @@ -464,10 +464,10 @@ class HTMLSectionSplitter: the HTML content is returned unchanged. Args: - html_content (str): The HTML content to be transformed. + html_content: The HTML content to be transformed. Returns: - str: The transformed HTML content as a string. + The transformed HTML content as a string. """ if not _HAS_LXML: msg = "Unable to import lxml, please install with `pip install lxml`." @@ -491,7 +491,7 @@ class HTMLSectionSplitter: return str(result) def split_text_from_file(self, file: StringIO) -> list[Document]: - """Split HTML content from a file into a list of Document objects. + """Split HTML content from a file into a list of `Document` objects. Args: file: A file path or a file-like object containing HTML content. @@ -524,12 +524,12 @@ class HTMLSemanticPreservingSplitter(BaseDocumentTransformer): structure. If chunks exceed the maximum chunk size, it uses RecursiveCharacterTextSplitter for further splitting. - The splitter preserves full HTML elements (e.g., ,
<table>, <ul>) and converts - links to Markdown-like links. It can also preserve images, videos, and audio - elements by converting them into Markdown format. Note that some chunks may - exceed the maximum size to maintain semantic integrity. + The splitter preserves full HTML elements and converts links to Markdown-like links. + It can also preserve images, videos, and audio elements by converting them into + Markdown format. Note that some chunks may exceed the maximum size to maintain + semantic integrity. - !!! version-added "Added in version 0.3.5" + !!! version-added "Added in `langchain-text-splitters` 0.3.5" Example: ```python @@ -584,22 +584,22 @@ class HTMLSemanticPreservingSplitter(BaseDocumentTransformer): """Initialize splitter. Args: - headers_to_split_on: HTML headers (e.g., "h1", "h2") + headers_to_split_on: HTML headers (e.g., `h1`, `h2`) that define content sections. max_chunk_size: Maximum size for each chunk, with allowance for exceeding this limit to preserve semantics. chunk_overlap: Number of characters to overlap between chunks to ensure contextual continuity. - separators: Delimiters used by RecursiveCharacterTextSplitter for + separators: Delimiters used by `RecursiveCharacterTextSplitter` for further splitting. - elements_to_preserve: HTML tags (e.g.,
<table>,