feat(model): Proxy multimodal supports (#2641)

This commit is contained in:
Fangyin Cheng 2025-04-21 19:36:29 +08:00 committed by GitHub
parent a6680610b9
commit 3d7d52250f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 491 additions and 17 deletions

View File

@ -59,7 +59,7 @@ fmt: setup ## Format Python code
$(VENV_BIN)/ruff check --select I --fix packages
$(VENV_BIN)/ruff check --select I --fix --exclude="examples/notebook" examples
$(VENV_BIN)/ruff check --select I --fix i18n
$(VENV_BIN)/ruff check --select I --fix update_version_all.py
$(VENV_BIN)/ruff check --select I --fix scripts/update_version_all.py
$(VENV_BIN)/ruff check --select I --fix install_help.py
$(VENV_BIN)/ruff check --fix packages \

View File

@ -0,0 +1,141 @@
# Multimodal Support in DB-GPT
DB-GPT supports multimodal capabilities, allowing you to work with various data types such as text, images, and audio. This guide will help you set up and use multimodal features in DB-GPT.
This guide covers running both local models and proxy models.
## Run Local Model
In this section, we will use the [Kimi-VL-A3B-Thinking](https://huggingface.co/moonshotai/Kimi-VL-A3B-Thinking)
model as an example to demonstrate how to run a local multimodal model.
### Step 1: Install Dependencies
Make sure you have the required dependencies installed. You can do this by running:
```bash
uv sync --all-packages \
--extra "base" \
--extra "hf" \
--extra "cuda121" \
--extra "rag" \
--extra "storage_chromadb" \
--extra "quant_bnb" \
--extra "dbgpts" \
--extra "model_vl" \
--extra "hf_kimi"
```
### Step 2: Modify Configuration File
After installing the dependencies, you can modify your configuration file to use the `Kimi-VL-A3B-Thinking` model.
You can create a new configuration file or modify an existing one. Below is an example configuration file:
```toml
# Model Configurations
[models]
[[models.llms]]
name = "moonshotai/Kimi-VL-A3B-Thinking"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
```
### Step 3: Run the Model
You can run the model using the following command:
```bash
uv run dbgpt start webserver --config {your_config_file}
```
### Step 4: Use The Model In DB-GPT
Currently, DB-GPT only supports image input, and only the `Chat Normal` scenario is supported.
You can click the `+` button in the chat window to upload an image. Then type your question in the input box and hit enter. The model will process the image and provide a response based on the content of the image.
<p align="left">
<img src={'/img/installation/advanced_usage/dbgpt-multimodal-local.jpg'} width="720px"/>
</p>
## Run Proxy Model
In this section, we will use the [Qwen/Qwen2.5-VL-32B-Instruct](https://huggingface.co/Qwen/Qwen2.5-VL-32B-Instruct) which is hosted on [SiliconFlow](https://siliconflow.cn/) as an example to demonstrate how to run a proxy multimodal model.
### Step 1: Install Dependencies
Make sure you have the required dependencies installed. You can do this by running:
```bash
uv sync --all-packages \
--extra "base" \
--extra "proxy_openai" \
--extra "rag" \
--extra "storage_chromadb" \
--extra "dbgpts" \
--extra "model_vl" \
--extra "file_s3"
```
Currently, most proxy models can't accept raw image data, so you need to upload your image to a storage service such as S3, MinIO, or Aliyun OSS, and then generate a public URL for the image. Because many storage services provide an S3-compatible API, you can use the `file_s3` extra to upload your image to your storage service.
### Step 2: Modify Configuration File
After installing the dependencies, you can modify your configuration file to use the `Qwen/Qwen2.5-VL-32B-Instruct` model.
You can create a new configuration file or modify an existing one. Below is an example configuration file:
```toml
# Model Configurations
[[models.llms]]
name = "Qwen/Qwen2.5-VL-32B-Instruct"
provider = "proxy/siliconflow"
api_key = "${env:SILICONFLOW_API_KEY}"
[[serves]]
type = "file"
# Default backend for file server
default_backend = "s3"
[[serves.backends]]
# Use Tencent COS s3 compatible API as the file server
type = "s3"
endpoint = "https://cos.ap-beijing.myqcloud.com"
region = "ap-beijing"
access_key_id = "${env:COS_SECRETID}"
access_key_secret = "${env:COS_SECRETKEY}"
fixed_bucket = "{your_bucket_name}"
```
Optionally, you can use the Aliyun OSS storage service as the file server (you should install the dependency `--extra "file_oss"` first).
```toml
[[serves]]
type = "file"
# Default backend for file server
default_backend = "oss"
[[serves.backends]]
type = "oss"
endpoint = "https://oss-cn-beijing.aliyuncs.com"
region = "oss-cn-beijing"
access_key_id = "${env:OSS_ACCESS_KEY_ID}"
access_key_secret = "${env:OSS_ACCESS_KEY_SECRET}"
fixed_bucket = "{your_bucket_name}"
```
### Step 3: Run the Model
You can run the model using the following command:
```bash
uv run dbgpt start webserver --config {your_config_file}
```
### Step 4: Use The Model In DB-GPT
<p align="left">
<img src={'/img/installation/advanced_usage/dbgpt-multimodal-proxy.jpg'} width="720px"/>
</p>

View File

@ -174,6 +174,10 @@ const sidebars = {
type: 'doc',
id: 'installation/advanced_usage/OpenAI_SDK_call',
},
{
type: 'doc',
id: 'installation/advanced_usage/multimodal',
},
],
},
],

Binary file not shown.

After

Width:  |  Height:  |  Size: 806 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 487 KiB

View File

@ -17,6 +17,7 @@ from dbgpt.core import (
ModelRequestContext,
SystemPromptTemplate,
)
from dbgpt.core.interface.file import FileStorageClient
from dbgpt.core.interface.media import MediaContent
from dbgpt.core.interface.message import (
HumanMessage,
@ -199,6 +200,7 @@ class BaseChat(ABC):
# will be compatible with all models
self._message_version = chat_param.message_version
self._chat_param = chat_param
self.fs_client = FileStorageClient.get_instance(system_app)
async def generate_input_values(self) -> Dict:
"""Generate input to LLM

View File

@ -151,6 +151,8 @@ hf_glm4 = [
"transformers>=4.51.3",
]
hf_kimi = [
"tiktoken",
"blobfile",
"transformers<4.51.3",
]

View File

@ -189,13 +189,22 @@ class StorageBackend(ABC):
storage_type: str = "__base__"
@abstractmethod
def save(self, bucket: str, file_id: str, file_data: BinaryIO) -> str:
def save(
self,
bucket: str,
file_id: str,
file_data: BinaryIO,
public_url: bool = False,
public_url_expire: Optional[int] = None,
) -> str:
"""Save the file data to the storage backend.
Args:
bucket (str): The bucket name
file_id (str): The file ID
file_data (BinaryIO): The file data
public_url (bool): Whether to generate a public URL
public_url_expire (Optional[int]): The expiration time for the public URL
Returns:
str: The storage path
@ -223,6 +232,21 @@ class StorageBackend(ABC):
bool: True if the file was deleted, False otherwise
"""
def get_public_url(
self, fm: FileMetadata, expire: Optional[int] = None
) -> Optional[str]:
"""Generate a public URL for an existing file.
Args:
fm (FileMetadata): The file metadata
expire (Optional[int], optional): Expiration time in seconds. Defaults to
class default.
Returns:
str: The generated public URL
"""
return None
@property
@abstractmethod
def save_chunk_size(self) -> int:
@ -249,7 +273,14 @@ class LocalFileStorage(StorageBackend):
"""Get the save chunk size."""
return self._save_chunk_size
def save(self, bucket: str, file_id: str, file_data: BinaryIO) -> str:
def save(
self,
bucket: str,
file_id: str,
file_data: BinaryIO,
public_url: bool = False,
public_url_expire: Optional[int] = None,
) -> str:
"""Save the file data to the local storage backend."""
bucket_path = os.path.join(self.base_path, bucket)
os.makedirs(bucket_path, exist_ok=True)
@ -321,6 +352,8 @@ class FileStorageSystem:
storage_type: str,
custom_metadata: Optional[Dict[str, Any]] = None,
file_id: Optional[str] = None,
public_url: bool = False,
public_url_expire: Optional[int] = None,
) -> str:
"""Save the file data to the storage backend."""
file_id = str(uuid.uuid4()) if not file_id else file_id
@ -337,7 +370,14 @@ class FileStorageSystem:
"storage_type": storage_type,
},
):
storage_path = backend.save(bucket, file_id, file_data)
storage_path = backend.save(
bucket,
file_id,
file_data,
public_url=public_url,
public_url_expire=public_url_expire,
)
file_data.seek(0, 2) # Move to the end of the file
file_size = file_data.tell() # Get the file size
file_data.seek(0) # Reset file pointer
@ -483,6 +523,39 @@ class FileStorageSystem:
return False
return False
def get_public_url(
self,
uri: str,
expire: Optional[int] = None,
) -> str:
"""Generate a public URL for an existing file.
Args:
uri (str): The file URI
expire (Optional[int], optional): Expiration time in seconds. Defaults to
None.
Returns:
str: The generated public URL, just return the original URI if the backend
does not support public URL generation.
"""
parsed_uri = FileStorageURI.parse(uri)
metadata = self.metadata_storage.load(
FileMetadataIdentifier(
file_id=parsed_uri.file_id, bucket=parsed_uri.bucket
),
FileMetadata,
)
if not metadata:
return None
backend = self.storage_backends.get(metadata.storage_type)
if not backend:
raise ValueError(f"Unsupported storage type: {metadata.storage_type}")
pub_url = backend.get_public_url(metadata, expire)
return pub_url if pub_url else uri
def list_files(
self, bucket: str, filters: Optional[Dict[str, Any]] = None
) -> List[FileMetadata]:
@ -729,6 +802,23 @@ class FileStorageClient(BaseComponent):
"""
return self.storage_system.list_files(bucket, filters)
def get_public_url(
self,
uri: str,
expire: Optional[int] = None,
) -> str:
"""Generate a public URL for an existing file.
Args:
uri (str): The file URI
expire (Optional[int], optional): Expiration time in seconds. Defaults to
None.
Returns:
str: The generated public URL
"""
return self.storage_system.get_public_url(uri, expire)
class SimpleDistributedStorage(StorageBackend):
"""Simple distributed storage backend."""
@ -768,7 +858,14 @@ class SimpleDistributedStorage(StorageBackend):
raise ValueError("Invalid storage path")
return storage_path.split("//")[1].split("/")[0]
def save(self, bucket: str, file_id: str, file_data: BinaryIO) -> str:
def save(
self,
bucket: str,
file_id: str,
file_data: BinaryIO,
public_url: bool = False,
public_url_expire: Optional[int] = None,
) -> str:
"""Save the file data to the distributed storage backend.
Just save the file locally.

View File

@ -43,6 +43,7 @@ class AliyunOSSStorage(StorageBackend):
bucket_prefix: str = "dbgpt-fs-",
bucket_mapper: Optional[Callable[[str], str]] = None,
auto_create_bucket: bool = True,
default_public_url_expire: int = 3600,
):
"""Initialize the Aliyun OSS storage backend.
@ -75,6 +76,7 @@ class AliyunOSSStorage(StorageBackend):
self.bucket_prefix = bucket_prefix
self.custom_bucket_mapper = bucket_mapper
self.auto_create_bucket = auto_create_bucket
self.default_public_url_expire = default_public_url_expire
# Initialize OSS authentication
if use_environment_credentials:
@ -223,13 +225,24 @@ class AliyunOSSStorage(StorageBackend):
f"Failed to get or create bucket for logical bucket {logical_bucket}"
)
def save(self, bucket: str, file_id: str, file_data: BinaryIO) -> str:
def save(
self,
bucket: str,
file_id: str,
file_data: BinaryIO,
public_url: bool = False,
public_url_expire: Optional[int] = None,
) -> str:
"""Save the file data to Aliyun OSS.
Args:
bucket (str): The logical bucket name
file_id (str): The file ID
file_data (BinaryIO): The file data
public_url (bool, optional): Whether to generate a public URL. Defaults to
False.
public_url_expire (Optional[int], optional): Expiration time for the public
URL in seconds. Defaults to None.
Returns:
str: The storage path (OSS URI)
@ -270,7 +283,29 @@ class AliyunOSSStorage(StorageBackend):
# Format: oss://{actual_bucket_name}/{object_name}
# We store both the actual bucket name and the object path in the URI
# But we'll also keep the logical bucket in the external URI format
return f"oss://{bucket}/{file_id}?actual_bucket={actual_bucket_name}&object_name={object_name}" # noqa
storage_path = f"oss://{bucket}/{file_id}?actual_bucket={actual_bucket_name}&object_name={object_name}" # noqa
# Generate a public URL if requested
if public_url:
# Use provided expiration time or default
expire_seconds = public_url_expire or self.default_public_url_expire
# Generate a signed URL for public access
try:
url = oss_bucket.sign_url(
"GET", object_name, expire_seconds, slash_safe=True
)
logger.info(
f"Generated public URL for {object_name} with expiration "
f"{expire_seconds} seconds"
)
return url
except oss2.exceptions.OssError as e:
logger.error(f"Failed to generate public URL for {object_name}: {e}")
# Fall back to returning the storage path
return storage_path
return storage_path
def _get_file_size(self, file_data: BinaryIO) -> int:
"""Get file size without consuming the file object.
@ -482,3 +517,58 @@ class AliyunOSSStorage(StorageBackend):
f" {e}"
)
return False
def get_public_url(
self, fm: FileMetadata, expire: Optional[int] = None
) -> Optional[str]:
"""Generate a public URL for an existing file.
Args:
fm (FileMetadata): The file metadata
expire (Optional[int], optional): Expiration time in seconds. Defaults to
class default.
Returns:
str: The generated public URL
"""
# Parse the storage path
path_info = self._parse_storage_path(fm.storage_path)
# Get actual bucket and object name
actual_bucket_name = path_info["actual_bucket"]
object_name = path_info["object_name"]
logical_bucket = path_info["logical_bucket"]
# If we couldn't determine the actual bucket from the URI, try with the logical
# bucket
if not actual_bucket_name and logical_bucket:
actual_bucket_name = self._map_bucket_name(logical_bucket)
# Use the file_id as object name if object_name is still None
if not object_name:
object_name = fm.file_id
# If using fixed bucket, prefix with logical bucket
if self.fixed_bucket and logical_bucket:
object_name = f"{logical_bucket}/{fm.file_id}"
# Get the bucket object
try:
oss_bucket = oss2.Bucket(
self.auth, self.endpoint, actual_bucket_name, region=self.region
)
# Use provided expiration time or default
expire_seconds = expire or self.default_public_url_expire
# Generate signed URL
url = oss_bucket.sign_url(
"GET", object_name, expire_seconds, slash_safe=True
)
logger.info(
f"Generated public URL for {object_name} with expiration "
f"{expire_seconds} seconds"
)
return url
except oss2.exceptions.OssError as e:
logger.error(f"Failed to generate public URL for {fm.file_id}: {e}")
raise

View File

@ -37,6 +37,7 @@ class S3Storage(StorageBackend):
auto_create_bucket: bool = True,
signature_version: Optional[str] = None,
s3_config: Optional[Dict[str, Union[str, int]]] = None,
default_public_url_expire: int = 3600, # Default to 1 hour
):
"""Initialize the S3 compatible storage backend.
@ -62,6 +63,8 @@ class S3Storage(StorageBackend):
signature_version (str, optional): S3 signature version to use.
s3_config (Optional[Dict[str, Union[str, int]]], optional): Additional
S3 configuration options. Defaults to None.
default_public_url_expire (int, optional): Default expiration time for
public URL in seconds. Defaults to 3600 (1 hour).
"""
self.endpoint_url = endpoint_url
self.region_name = region_name
@ -71,6 +74,7 @@ class S3Storage(StorageBackend):
self.custom_bucket_mapper = bucket_mapper
self.auto_create_bucket = auto_create_bucket
self.signature_version = signature_version
self.default_public_url_expire = default_public_url_expire
# Build S3 client configuration
if not s3_config:
@ -251,16 +255,27 @@ class S3Storage(StorageBackend):
logger.error(f"Failed to check bucket {bucket_name}: {e}")
return False
def save(self, bucket: str, file_id: str, file_data: BinaryIO) -> str:
def save(
self,
bucket: str,
file_id: str,
file_data: BinaryIO,
public_url: bool = False,
public_url_expire: Optional[int] = None,
) -> str:
"""Save the file data to S3.
Args:
bucket (str): The logical bucket name
file_id (str): The file ID
file_data (BinaryIO): The file data
public_url (bool, optional): Whether to generate a public URL. Defaults to
False.
public_url_expire (Optional[int], optional): Expiration time for the public
URL in seconds. Defaults to None.
Returns:
str: The storage path (S3 URI)
str: The storage path (S3 URI) or the public URL if public_url is True
"""
# Get the actual S3 bucket
actual_bucket_name = self._map_bucket_name(bucket)
@ -337,8 +352,32 @@ class S3Storage(StorageBackend):
)
raise
# Format: s3://{logical_bucket}/{file_id}?actual_bucket={actual_bucket_name}&object_key={object_key} # noqa
return f"s3://{bucket}/{file_id}?actual_bucket={actual_bucket_name}&object_key={object_key}" # noqa
# Standard storage path
storage_path = f"s3://{bucket}/{file_id}?actual_bucket={actual_bucket_name}&object_key={object_key}" # noqa
# Generate a public URL if requested
if public_url:
# Use provided expiration time or default
expire_seconds = public_url_expire or self.default_public_url_expire
try:
# Generate a pre-signed URL for public access
url = self.s3_client.generate_presigned_url(
"get_object",
Params={"Bucket": actual_bucket_name, "Key": object_key},
ExpiresIn=expire_seconds,
)
logger.info(
f"Generated public URL for {object_key} with expiration "
f"{expire_seconds} seconds"
)
return url
except ClientError as e:
logger.error(f"Failed to generate public URL for {object_key}: {e}")
# Fall back to returning the storage path
return storage_path
return storage_path
def _get_file_size(self, file_data: BinaryIO) -> int:
"""Get file size without consuming the file object.
@ -470,6 +509,58 @@ class S3Storage(StorageBackend):
"object_key": object_key,
}
def get_public_url(
self, fm: FileMetadata, expire: Optional[int] = None
) -> Optional[str]:
"""Generate a public URL for an existing file.
Args:
fm (FileMetadata): The file metadata
expire (Optional[int], optional): Expiration time in seconds. Defaults to
class default.
Returns:
str: The generated public URL
"""
# Parse the storage path
path_info = self._parse_storage_path(fm.storage_path)
# Get actual bucket and object key
actual_bucket_name = path_info["actual_bucket"]
object_key = path_info["object_key"]
logical_bucket = path_info["logical_bucket"]
# If we couldn't determine the actual bucket from the URI, try with the logical
# bucket
if not actual_bucket_name and logical_bucket:
actual_bucket_name = self._map_bucket_name(logical_bucket)
# Use the file_id as object key if object_key is still None
if not object_key:
object_key = fm.file_id
# If using fixed bucket, prefix with logical bucket
if self.fixed_bucket and logical_bucket:
object_key = f"{logical_bucket}/{fm.file_id}"
# Use provided expiration time or default
expire_seconds = expire or self.default_public_url_expire
try:
# Generate a pre-signed URL for public access
url = self.s3_client.generate_presigned_url(
"get_object",
Params={"Bucket": actual_bucket_name, "Key": object_key},
ExpiresIn=expire_seconds,
)
logger.info(
f"Generated public URL for {object_key} with expiration "
f"{expire_seconds} seconds"
)
return url
except ClientError as e:
logger.error(f"Failed to generate public URL for {fm.file_id}: {e}")
raise
def load(self, fm: FileMetadata) -> BinaryIO:
"""Load the file data from S3.

View File

@ -140,6 +140,10 @@ class Serve(BaseServe):
def replace_uri(self, uri: str) -> str:
"""Replace the uri with the new uri"""
try:
new_uri = self.file_storage_client.get_public_url(uri)
if new_uri != uri:
return new_uri
# If the URI was not converted to a public URL, fall back to rewriting
# it with the node address below
parsed_uri = FileStorageURI.parse(uri)
bucket, file_id = parsed_uri.bucket, parsed_uri.file_id
node_address = self._serve_config.get_node_address()

View File

@ -40,15 +40,16 @@ Examples:
uv run version_update.py 0.8.0 --filter dbgpt-core # Only update dbgpt-core package
"""
import sys
import re
import json
import argparse
import tomli
from pathlib import Path
import json
import re
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import List, Optional
import tomli
@dataclass
class VersionChange:

44
uv.lock
View File

@ -1853,6 +1853,21 @@ wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/9a/91/4aea63dccee6491a54c630d9817656a886e086ab97222e2d8101d8cdf894/blis-0.7.11-cp312-cp312-win_amd64.whl", hash = "sha256:5a305dbfc96d202a20d0edd6edf74a406b7e1404f4fa4397d24c68454e60b1b4", size = 6624079 },
]
[[package]]
name = "blobfile"
version = "3.0.0"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
dependencies = [
{ name = "filelock" },
{ name = "lxml" },
{ name = "pycryptodomex" },
{ name = "urllib3" },
]
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/9d/a9/a34e8153b0203d9060ff7aa5dfcd175e161117949697a83c4cc003b523ff/blobfile-3.0.0.tar.gz", hash = "sha256:32ec777414de7bb2a76ca812a838f0d33327ca28ae844a253503cde625cdf2f1", size = 77863 }
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ed/4d/1392562369b1139e741b30d624f09fe7091d17dd5579fae5732f044b12bb/blobfile-3.0.0-py3-none-any.whl", hash = "sha256:48ecc3307e622804bd8fe13bf6f40e6463c4439eba7a1f9ad49fd78aa63cc658", size = 75413 },
]
[[package]]
name = "boto3"
version = "1.37.34"
@ -2710,6 +2725,8 @@ hf-glm4 = [
{ name = "transformers", version = "4.51.3", source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" } },
]
hf-kimi = [
{ name = "blobfile" },
{ name = "tiktoken" },
{ name = "transformers", version = "4.51.2", source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" } },
]
llama-cpp = [
@ -2767,6 +2784,7 @@ requires-dist = [
{ name = "anthropic", marker = "extra == 'proxy-anthropic'" },
{ name = "auto-gpt-plugin-template", marker = "extra == 'agent'", specifier = ">=0.0.3" },
{ name = "auto-gpt-plugin-template", marker = "extra == 'framework'" },
{ name = "blobfile", marker = "extra == 'hf-kimi'" },
{ name = "cachetools" },
{ name = "chardet", specifier = "==5.1.0" },
{ name = "click", marker = "extra == 'cli'" },
@ -2826,6 +2844,7 @@ requires-dist = [
{ name = "sqlparse", marker = "extra == 'simple-framework'", specifier = "==0.4.4" },
{ name = "tenacity", marker = "extra == 'client'", specifier = "<=8.3.0" },
{ name = "termcolor", marker = "extra == 'agent'" },
{ name = "tiktoken", marker = "extra == 'hf-kimi'" },
{ name = "tiktoken", marker = "extra == 'proxy-openai'", specifier = ">=0.8.0" },
{ name = "tokenizers", marker = "extra == 'framework'", specifier = ">=0.14" },
{ name = "tomli", specifier = ">=2.2.1" },
@ -6416,7 +6435,7 @@ wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/8c/de/8eb6fffecd9c5f129461edcdd7e1ac944f9de15783e3d89c84ed6e0374bc/lxml-5.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa837e6ee9534de8d63bc4c1249e83882a7ac22bd24523f83fad68e6ffdf41ae", size = 5652903 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/95/79/80f4102a08495c100014593680f3f0f7bd7c1333b13520aed855fc993326/lxml-5.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:da4c9223319400b97a2acdfb10926b807e51b69eb7eb80aad4942c0516934858", size = 5491813 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/15/f5/9b1f7edf6565ee31e4300edb1bcc61eaebe50a3cff4053c0206d8dc772f2/lxml-5.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dc0e9bdb3aa4d1de703a437576007d366b54f52c9897cae1a3716bb44fc1fc85", size = 5227837 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/5c/17/c31d94364c02e3492215658917f5590c00edce8074aeb06d05b7771465d9/lxml-5.3.2-cp310-cp310-win32.whl", hash = "sha256:5f94909a1022c8ea12711db7e08752ca7cf83e5b57a87b59e8a583c5f35016ad", size = 3477533 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/dd/53/a187c4ccfcd5fbfca01e6c96da39499d8b801ab5dcf57717db95d7a968a8/lxml-5.3.2-cp310-cp310-win32.whl", hash = "sha256:dd755a0a78dd0b2c43f972e7b51a43be518ebc130c9f1a7c4480cf08b4385486", size = 3477533 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f2/2c/397c5a9d76a7a0faf9e5b13143ae1a7e223e71d2197a45da71c21aacb3d4/lxml-5.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:d64ea1686474074b38da13ae218d9fde0d1dc6525266976808f41ac98d9d7980", size = 3805160 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/84/b8/2b727f5a90902f7cc5548349f563b60911ca05f3b92e35dfa751349f265f/lxml-5.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9d61a7d0d208ace43986a92b111e035881c4ed45b1f5b7a270070acae8b0bfb4", size = 8163457 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/91/84/23135b2dc72b3440d68c8f39ace2bb00fe78e3a2255f7c74f7e76f22498e/lxml-5.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856dfd7eda0b75c29ac80a31a6411ca12209183e866c33faf46e77ace3ce8a79", size = 4433445 },
@ -9378,6 +9397,29 @@ wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/3b/49/195842931f9ee6f14cd63ef85e06b93073463ed59601fb283ba9b813cd53/pycryptodome-3.22.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7514a1aebee8e85802d154fdb261381f1cb9b7c5a54594545145b8ec3056ae6", size = 1797436 },
]
[[package]]
name = "pycryptodomex"
version = "3.22.0"
source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ba/d5/861a7daada160fcf6b0393fb741eeb0d0910b039ad7f0cd56c39afdd4a20/pycryptodomex-3.22.0.tar.gz", hash = "sha256:a1da61bacc22f93a91cbe690e3eb2022a03ab4123690ab16c46abb693a9df63d", size = 4917584 }
wheels = [
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/62/c2/8c97e649ccd3886eaf4918bd87791d3b52e80ba5b9c4678e2b631f2f8340/pycryptodomex-3.22.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:aef4590263b9f2f6283469e998574d0bd45c14fb262241c27055b82727426157", size = 2494197 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f1/62/e947c35efebf95ba9bfe3fd76d766caa8d66d3f5d440fca05328c18b3352/pycryptodomex-3.22.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:5ac608a6dce9418d4f300fab7ba2f7d499a96b462f2b9b5c90d8d994cd36dcad", size = 1638999 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/51/af/f877f8ec1c4185e3ede3bf2beb286e5150099d2b3cba528c98d832372f38/pycryptodomex-3.22.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a24f681365ec9757ccd69b85868bbd7216ba451d0f86f6ea0eed75eeb6975db", size = 2181008 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/6f/72/e7e748c682c889f30a0a7c3072a27a002b50a6cf5912ad1ce1269e327f40/pycryptodomex-3.22.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:259664c4803a1fa260d5afb322972813c5fe30ea8b43e54b03b7e3a27b30856b", size = 2267300 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a9/ff/c45a97427aefbea07e8e6f2e08b10b4f2b287b99997bd22a4cef913e53a6/pycryptodomex-3.22.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7127d9de3c7ce20339e06bcd4f16f1a1a77f1471bcf04e3b704306dde101b719", size = 2306939 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/80/c7/cfbdd748a45b7fe8769a5494f130b092e9392e780ad204b5bc39c1a3a521/pycryptodomex-3.22.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee75067b35c93cc18b38af47b7c0664998d8815174cfc66dd00ea1e244eb27e6", size = 2180286 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/91/db/26f5d2af7cf809acfe1d1d7182a81fc0d0c13c26dd995b22c5b41be28bf9/pycryptodomex-3.22.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:1a8b0c5ba061ace4bcd03496d42702c3927003db805b8ec619ea6506080b381d", size = 2340887 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/a1/4c/78307b989d4855f806fff16424f837400e22df3695725f6aa45553e3a13c/pycryptodomex-3.22.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:bfe4fe3233ef3e58028a3ad8f28473653b78c6d56e088ea04fe7550c63d4d16b", size = 2265831 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/fb/ad/cc69805083af164419a4413bc0ebc791e17103327da6979b14d5d3c7e7e5/pycryptodomex-3.22.0-cp37-abi3-win32.whl", hash = "sha256:2cac9ed5c343bb3d0075db6e797e6112514764d08d667c74cb89b931aac9dddd", size = 1766824 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/15/c8/79ab16e5b95a8988caee792236a776beceabcaa2518979d4e21b6ee20f57/pycryptodomex-3.22.0-cp37-abi3-win_amd64.whl", hash = "sha256:ff46212fda7ee86ec2f4a64016c994e8ad80f11ef748131753adb67e9b722ebd", size = 1797989 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/2d/27/4e49f8df0f983402b97c2a42f9482ffd747cbdaed8c0f7e6651d760bec42/pycryptodomex-3.22.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c8cffb03f5dee1026e3f892f7cffd79926a538c67c34f8b07c90c0bd5c834e27", size = 1622464 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/ee/f4/8147561bd1970b1617dca321f2b0ca984c5c8a69ce5d39450b5a5bfa8912/pycryptodomex-3.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:140b27caa68a36d0501b05eb247bd33afa5f854c1ee04140e38af63c750d4e39", size = 1670534 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/c5/3a/af57daba1a9d7a0e3edb779db2a2e8db26fe5cd8a447f7a4cb4eb7bb1369/pycryptodomex-3.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:644834b1836bb8e1d304afaf794d5ae98a1d637bd6e140c9be7dd192b5374811", size = 1662794 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/f8/62/aaca2aaaff8660a4a19598af40683b2e9294e78649253236b5cb592ebb04/pycryptodomex-3.22.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c506aba3318505dbeecf821ed7b9a9f86f422ed085e2d79c4fba0ae669920a", size = 1700798 },
{ url = "https://pypi.tuna.tsinghua.edu.cn/packages/03/2a/e28a5a6d4c3cd9b4bad96c928c3b0a69373aa690cbdb47380616ea9e1866/pycryptodomex-3.22.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7cd39f7a110c1ab97ce9ee3459b8bc615920344dc00e56d1b709628965fba3f2", size = 1801306 },
]
[[package]]
name = "pydantic"
version = "2.11.3"