refactor(dbgpts): Refactor dbgpts for 0.7.0 (#2397)

parent 243e98123d · commit d5a2a0bf3b
@@ -43,6 +43,7 @@ base = [
 ]
 dbgpts = [
     # For build dbgpts apps, we will be removed in the future.
+    "build",
     "poetry"
 ]
 
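The `dbgpts` optional dependency group now ships the PEP 517 `build` frontend alongside `poetry`, so dbgpts packages can be built even when Poetry is not installed. An illustrative sanity check (not part of the change; it assumes the extra has already been installed into the current environment):

# Illustrative only: verify both build backends declared in the "dbgpts" extra
# are importable in the current environment.
import importlib.util

for tool in ("build", "poetry"):
    print(tool, "importable:", importlib.util.find_spec(tool) is not None)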
@@ -187,6 +187,7 @@ try:
         list_installed_apps,
         list_repos,
         new_dbgpts,
+        reinstall,
         remove_repo,
         update_repo,
     )
@@ -200,6 +201,7 @@ try:
     add_command_alias(update_repo, name="update", parent_group=repo)
     add_command_alias(app_install, name="install", parent_group=app)
     add_command_alias(app_uninstall, name="uninstall", parent_group=app)
+    add_command_alias(reinstall, name="reinstall", parent_group=app)
     add_command_alias(app_list_remote, name="list-remote", parent_group=app)
     add_command_alias(list_installed_apps, name="list", parent_group=app)
     add_command_alias(new_dbgpts, name="app", parent_group=new)
@@ -57,6 +57,9 @@ def get_device() -> str:
     except ModuleNotFoundError:
         return "cpu"
 
+    except Exception:
+        return "cpu"
+
 
 LLM_MODEL_CONFIG = {
     "flan-t5-base": os.path.join(MODEL_PATH, "flan-t5-base"),
@@ -447,6 +447,7 @@ class ModelOutput:
         error_code: int = 0,
         usage: Optional[Dict[str, Any]] = None,
         finish_reason: Optional[str] = None,
+        is_reasoning_model: bool = False,
     ) -> "ModelOutput":
         if thinking and text:
             # Has thinking and text
@@ -460,7 +461,12 @@ class ModelOutput:
             content = MediaContent.build_text(text)
         else:
             # Build a empty thinking content
-            content = MediaContent.build_thinking(thinking)
+            # Handle empty data
+            content = (
+                MediaContent.build_thinking(thinking)
+                if is_reasoning_model
+                else MediaContent.build_text("")
+            )
         return cls(
             error_code=error_code,
             content=content,
@@ -164,4 +164,5 @@ class LlamaCppModel:
             error_code=0,
             usage=usage,
             finish_reason=finish_reason,
+            is_reasoning_model=is_reasoning_model,
         )
@@ -110,6 +110,7 @@ def chat_generate_stream(
             error_code=0,
             finish_reason=finish_reason,
             usage=r.usage,
+            is_reasoning_model=is_reasoning_model,
         )
 
 
@@ -93,4 +93,5 @@ def huggingface_chat_generate_stream(
             msg.reasoning_content,
             error_code=0,
             usage=usage,
+            is_reasoning_model=is_reasoning_model,
         )
@@ -117,4 +117,5 @@ async def generate_stream(
             error_code=0,
             usage=usage,
             finish_reason=finish_reason,
+            is_reasoning_model=is_reasoning_model,
         )
@@ -38,7 +38,7 @@ try:
     from transformers.utils import is_torch_npu_available
 
     transformers.is_torch_npu_available = is_torch_npu_available
-except ImportError:
+except Exception:
     pass
 
 
@@ -1,4 +1,5 @@
 import functools
+import shutil
 import subprocess
 from pathlib import Path
 
@@ -10,23 +11,95 @@ from .base import DEFAULT_PACKAGE_TYPES
 cl = CliLogger()
 
 
-def check_poetry_installed():
-    try:
-        # Check if poetry is installed
-        subprocess.run(
-            ["poetry", "--version"],
-            check=True,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
-        )
-    except (subprocess.CalledProcessError, FileNotFoundError):
-        cl.error("Poetry is not installed. Please install Poetry to proceed.")
-        # Exit with error
-        cl.error(
-            "Visit https://python-poetry.org/docs/#installation for installation "
-            "instructions.",
-            exit_code=1,
-        )
+def check_build_tools_installed():
+    """Check if any supported build tools are installed (uv, poetry, build, or
+    setuptools)
+
+    Warns if uv is not installed but does not exit.
+    Only exits if no build tools are available.
+    """
+    tools_available = []
+
+    # Check for uv (preferred tool)
+    if shutil.which("uv"):
+        try:
+            subprocess.run(
+                ["uv", "--version"],
+                check=True,
+                stdout=subprocess.DEVNULL,
+                stderr=subprocess.DEVNULL,
+            )
+            tools_available.append("uv")
+        except subprocess.CalledProcessError:
+            pass
+
+    # Check for poetry
+    if shutil.which("poetry"):
+        try:
+            subprocess.run(
+                ["poetry", "--version"],
+                check=True,
+                stdout=subprocess.DEVNULL,
+                stderr=subprocess.DEVNULL,
+            )
+            tools_available.append("poetry")
+        except subprocess.CalledProcessError:
+            pass
+
+    # Check for build package
+    try:
+        # Check if python and build are available
+        result = subprocess.run(
+            ["python", "-c", "import build; print('yes')"],
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+        if "yes" in result.stdout:
+            tools_available.append("build")
+    except (subprocess.CalledProcessError, FileNotFoundError):
+        pass
+
+    # Check for python with setuptools
+    try:
+        # Check if python and setuptools are available for basic package building
+        result = subprocess.run(
+            ["python", "-c", "import setuptools; print('yes')"],
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+        if "yes" in result.stdout:
+            tools_available.append("setuptools")
+    except (subprocess.CalledProcessError, FileNotFoundError):
+        pass
+
+    # Provide appropriate feedback based on available tools
+    if not tools_available:
+        cl.error("No build tools found. Please install one of the following:")
+        cl.error(" - uv: A fast Python package installer and resolver (recommended)")
+        cl.error(" - poetry: A Python package and dependency manager")
+        cl.error(" - build: A PEP 517 compatible Python package builder")
+        cl.error(" - setuptools: A classic Python package build system")
+        cl.error(
+            "For uv: https://github.com/astral-sh/uv\n"
+            "For poetry: https://python-poetry.org/docs/#installation\n"
+            "For build: pip install build\n"
+            "For setuptools: pip install setuptools",
+            exit_code=1,
+        )
+    elif "uv" not in tools_available:
+        cl.warning(
+            "uv is not installed. We recommend using uv for better performance."
+            " Install with: 'pip install uv' or visit https://github.com/astral-sh/uv"
+        )
+        if "build" not in tools_available:
+            cl.warning(
+                "build package is not installed. For better compatibility without uv,"
+                " we recommend installing it with: 'pip install build'"
+            )
+
+    return tools_available
 
 
 def add_tap_options(func):
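A hedged usage sketch of the new check (the function is defined in the hunk above and is assumed to be in scope; the "uv preferred" remark reflects `_install_wheel` further down, which tries uv before pip):

# Sketch only: the CLI calls this once before installing, but the returned list
# of detected tools can also be inspected directly.
tools = check_build_tools_installed()
if "uv" not in tools:
    print("uv not found; falling back to:", tools or "no usable build tool")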
@@ -77,7 +150,7 @@ def install(repo: str | None, update: bool, names: list[str]):
     """Install your dbgpts(operators,agents,workflows or apps)"""
     from .repo import _install_default_repos_if_no_repos, install
 
-    check_poetry_installed()
+    check_build_tools_installed()
     _install_default_repos_if_no_repos()
     for name in names:
         install(name, repo, with_update=update)
@@ -93,6 +166,17 @@ def uninstall(names: list[str]):
         uninstall(name)
 
 
+@click.command(name="reinstall")
+@add_add_common_options
+@click.argument("names", type=str, nargs=-1)
+def reinstall(repo: str | None, update: bool, names: list[str]):
+    """Reinstall your dbgpts(operators,agents,workflows or apps)"""
+    from .repo import reinstall
+
+    for name in names:
+        reinstall(name, repo, with_update=update)
+
+
 @click.command(name="list-remote")
 @add_add_common_options
 def list_all_apps(
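For illustration, a minimal sketch of exercising the new command with Click's test runner; the options contributed by `@add_add_common_options` are not shown in this diff, so only the positional `NAMES` argument is used, and the package name is a made-up example:

# Sketch only: invoke the new "reinstall" command in-process.
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(reinstall, ["awel-flow-example"])  # hypothetical dbgpts name
print(result.exit_code, result.output)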
@@ -253,7 +337,7 @@ def new_dbgpts(
             type=click.Path(exists=True, file_okay=False, dir_okay=True),
         )
 
-    check_poetry_installed()
+    check_build_tools_installed()
     from .template import create_template
 
     create_template(name, label, description, type, definition_type, directory)
@ -1,8 +1,11 @@
|
|||||||
import functools
|
import functools
|
||||||
|
import importlib.metadata
|
||||||
|
import importlib.util
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import List, Tuple
|
from typing import List, Tuple
|
||||||
|
|
||||||
@@ -28,6 +31,183 @@ _DEFAULT_REPO = "eosphoros/dbgpts"
 logger = logging.getLogger(__name__)
 
 
+def _is_poetry_project(package_path: Path) -> bool:
+    """Check if the project uses poetry build system by examining pyproject.toml
+
+    Args:
+        package_path (Path): Path to the package directory
+
+    Returns:
+        bool: True if the project uses poetry, False otherwise
+    """
+    pyproject_path = package_path / "pyproject.toml"
+    if not pyproject_path.exists():
+        return False
+
+    try:
+        import tomli  # Python 3.11+ uses tomllib from stdlib
+    except ImportError:
+        try:
+            import tomlkit as tomli
+        except ImportError:
+            import tomli
+
+    with open(pyproject_path, "rb") as f:
+        pyproject_data = tomli.load(f)
+
+    # Check if poetry is defined in build-system
+    build_system = pyproject_data.get("build-system", {})
+    build_backend = build_system.get("build-backend", "")
+    requires = build_system.get("requires", [])
+
+    return (
+        "poetry" in build_backend.lower()
+        or any("poetry" in req.lower() for req in requires)
+        or "tool" in pyproject_data
+        and "poetry" in pyproject_data.get("tool", {})
+    )
+
+
+def _is_module_installed(module_name: str) -> bool:
+    """Check if a Python module is installed
+
+    Args:
+        module_name (str): Module name to check
+
+    Returns:
+        bool: True if installed, False otherwise
+    """
+    try:
+        importlib.metadata.distribution(module_name)
+        return True
+    except importlib.metadata.PackageNotFoundError:
+        return False
+
+
+def _build_package(
+    install_path: Path, is_poetry: bool, log_func=logger.info, error_func=logger.error
+) -> tuple[bool, str]:
+    """Build package using poetry or build module
+
+    Args:
+        install_path (Path): Path to the package
+        is_poetry (bool): Whether to use poetry for build
+        log_func: Function to use for logging info
+        error_func: Function to use for logging errors
+
+    Returns:
+        tuple: (success, error_message)
+    """
+    python_executable = sys.executable
+    os.chdir(install_path)
+
+    if is_poetry and shutil.which("poetry"):
+        log_func("Building with poetry...")
+        process = subprocess.Popen(
+            ["poetry", "build"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
+    else:
+        # Check if build module is available
+        if _is_module_installed("build"):
+            log_func("Building with python -m build...")
+            process = subprocess.Popen(
+                [python_executable, "-m", "build", "--wheel"],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+            )
+        else:
+            # Fallback to setuptools directly if available
+            if (install_path / "setup.py").exists():
+                log_func("Building with setuptools directly...")
+                process = subprocess.Popen(
+                    [python_executable, "setup.py", "bdist_wheel"],
+                    stdout=subprocess.PIPE,
+                    stderr=subprocess.PIPE,
+                )
+            else:
+                error_msg = (
+                    "No suitable build method found. Please install build: "
+                    "pip install build"
+                )
+                error_func(error_msg)
+                return False, error_msg
+
+    out, err = process.communicate()
+    log_func(f"Build output: {out.decode('utf-8', errors='replace')}")
+    if err:
+        error_msg = f"Build warnings/errors: {err.decode('utf-8', errors='replace')}"
+        log_func(error_msg)
+
+    return process.returncode == 0, "" if process.returncode == 0 else error_msg
+
+
+def _install_wheel(
+    wheel_path: str, log_func=logger.info, error_func=logger.error
+) -> tuple[bool, str]:
+    """Install wheel file using the best available method
+
+    Args:
+        wheel_path (str): Path to the wheel file
+        log_func: Function to use for logging info
+        error_func: Function to use for logging errors
+
+    Returns:
+        tuple: (success, error_message)
+    """
+    python_executable = sys.executable
+
+    # Try uv first if available (faster)
+    if shutil.which("uv"):
+        log_func(f"Installing wheel with uv: {wheel_path}")
+        process = subprocess.Popen(
+            ["uv", "pip", "install", wheel_path],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        out, err = process.communicate()
+        if process.returncode == 0:
+            return True, ""
+        log_func(f"uv install failed: {err.decode('utf-8', errors='replace')}")
+        # Fall through to pip if uv fails
+
+    # Try pip
+    if shutil.which("pip") or _is_module_installed("pip"):
+        log_func(f"Installing wheel with pip: {wheel_path}")
+        process = subprocess.Popen(
+            [python_executable, "-m", "pip", "install", wheel_path],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        out, err = process.communicate()
+        if process.returncode == 0:
+            return True, ""
+        log_func(f"Pip install failed: {err.decode('utf-8', errors='replace')}")
+
+    # Alternative: Try using importlib directly
+    try:
+        import site
+        import zipfile
+
+        log_func(f"Attempting manual wheel installation for {wheel_path}")
+
+        # Get the site-packages directory
+        site_packages = site.getsitepackages()[0]
+
+        # Extract the wheel contents to site-packages
+        with zipfile.ZipFile(wheel_path, "r") as wheel_zip:
+            # Extract only the Python modules (*.py, *.so, etc.)
+            for file in wheel_zip.namelist():
+                if ".dist-info/" not in file and file.endswith((".py", ".so", ".pyd")):
+                    wheel_zip.extract(file, site_packages)
+
+        log_func(f"Manual installation to {site_packages} completed")
+        return True, ""
+    except Exception as e:
+        error_msg = f"Manual installation failed: {str(e)}"
+        error_func(error_msg)
+        return False, error_msg
+
+
 @functools.cache
 def list_repos() -> List[str]:
     """List all repos
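A hedged end-to-end sketch of how the three new helpers compose; every name comes from the hunk above, the package path is a made-up example, and the helpers remain module-private:

# Sketch only: detect the build backend, build a wheel, then install it with the
# best available installer (uv -> pip -> manual extraction), mirroring the helpers above.
from pathlib import Path

pkg = Path("/tmp/dbgpts/awel-flow-example")   # hypothetical copied package directory
is_poetry = _is_poetry_project(pkg)           # inspects [build-system] in pyproject.toml
ok, err = _build_package(pkg, is_poetry)      # poetry build / python -m build / setup.py bdist_wheel
if ok:
    wheel = next(pkg.glob("dist/*.whl"))      # same glob the install paths use below
    ok, err = _install_wheel(str(wheel))
print(ok, err)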
@@ -234,6 +414,20 @@ def uninstall(name: str):
     cl.info(f"Uninstalling dbgpt '{name}'...")
 
 
+def reinstall(
+    name: str,
+    repo: str | None = None,
+    with_update: bool = True,
+):
+    """Reinstall the specified dbgpt
+
+    Args:
+        name (str): The name of the dbgpt
+    """
+    uninstall(name)
+    install(name, repo, with_update=with_update)
+
+
 def inner_uninstall(name: str):
     """Uninstall the specified dbgpt
 
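Usage is intentionally a thin "uninstall then install" wrapper; a sketch assuming the function above is in scope (the package name is a made-up example):

# Sketch only: drop the current copy and pull a fresh one from the configured repos.
reinstall("awel-flow-example", repo=None, with_update=True)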
@@ -271,35 +465,34 @@ def inner_copy_and_install(repo: str, name: str, package_path: Path):
     try:
         shutil.copytree(package_path, install_path)
         logger.info(f"Installing dbgpts '{name}' from {repo}...")
-        os.chdir(install_path)
-        process = subprocess.Popen(
-            ["poetry", "build"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
-        )
-        out, err = process.communicate()
-        logger.info(f"{out},{err}")
+
+        # Check if it's a poetry project
+        is_poetry = _is_poetry_project(install_path)
+
+        # Build the package
+        build_success = _build_package(install_path, is_poetry)
+        if not build_success:
+            raise ValueError("Failed to build the package")
 
         wheel_files = list(install_path.glob("dist/*.whl"))
         if not wheel_files:
-            logger.error(
-                "No wheel file found after building the package.",
-            )
+            logger.error("No wheel file found after building the package.")
             raise ValueError("No wheel file found after building the package.")
-        # Install the wheel file using pip
+
+        # Install the wheel file
         wheel_file = wheel_files[0]
         logger.info(
             f"Installing dbgpts '{name}' wheel file {_print_path(wheel_file)}..."
         )
-        # subprocess.run(["pip", "install", str(wheel_file)], check=True)
-        process = subprocess.Popen(
-            ["pip", "install", str(wheel_file)],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-        )
-        out, err = process.communicate()
-        logger.info(f"{out},{err}")
+        install_success = _install_wheel(str(wheel_file))
+        if not install_success:
+            raise ValueError(f"Failed to install wheel file: {wheel_file}")
+
         _write_install_metadata(name, repo, install_path)
         logger.info(f"Installed dbgpts at {_print_path(install_path)}.")
         logger.info(f"dbgpts '{name}' installed successfully.")
+        return True
     except Exception as e:
         if install_path.exists():
             shutil.rmtree(install_path)
@@ -307,6 +500,15 @@ def inner_copy_and_install(repo: str, name: str, package_path: Path):
 
 
 def copy_and_install(repo: str, name: str, package_path: Path):
+    """Install the specified dbgpt from a repository
+
+    This function reuses the inner implementation but provides CLI feedback
+
+    Args:
+        repo (str): The name of the repo
+        name (str): The name of the dbgpt
+        package_path (Path): The path to the package
+    """
     if not package_path.exists():
         cl.error(
             f"The specified dbgpt '{name}' does not exist in the {repo} tap.",
@@ -322,15 +524,33 @@ def copy_and_install(repo: str, name: str, package_path: Path):
     try:
         shutil.copytree(package_path, install_path)
         cl.info(f"Installing dbgpts '{name}' from {repo}...")
-        os.chdir(install_path)
-        subprocess.run(["poetry", "build"], check=True)
+
+        # Check if it's a poetry project
+        is_poetry = _is_poetry_project(install_path)
+
+        # Build the package using the shared implementation but with CLI logging
+        build_success, build_error = _build_package(
+            install_path, is_poetry, log_func=cl.info, error_func=cl.error
+        )
+
+        if not build_success:
+            cl.error(f"Failed to build the package: {build_error}", exit_code=1)
+
         wheel_files = list(install_path.glob("dist/*.whl"))
         if not wheel_files:
             cl.error("No wheel file found after building the package.", exit_code=1)
-        # Install the wheel file using pip
+
+        # Install the wheel file using the shared implementation but with CLI logging
         wheel_file = wheel_files[0]
         cl.info(f"Installing dbgpts '{name}' wheel file {_print_path(wheel_file)}...")
-        subprocess.run(["pip", "install", str(wheel_file)], check=True)
+
+        install_success, install_error = _install_wheel(
+            str(wheel_file), log_func=cl.info, error_func=cl.error
+        )
+
+        if not install_success:
+            cl.error(f"Failed to install wheel file: {install_error}", exit_code=1)
+
         _write_install_metadata(name, repo, install_path)
         cl.success(f"Installed dbgpts at {_print_path(install_path)}.")
         cl.success(f"dbgpts '{name}' installed successfully.")
@@ -1,4 +1,5 @@
 import os
+import shutil
 import subprocess
 from pathlib import Path
 
@@ -90,7 +91,9 @@ def _create_flow_template(
     if definition_type == "json":
         json_dict["json_config"] = {"file_path": "definition/flow_definition.json"}
 
-    _create_poetry_project(working_directory, name)
+    # Create project structure
+    _create_project_structure(working_directory, name, base_metadata.get("description"))
+
     _write_dbgpts_toml(working_directory, name, json_dict)
     _write_manifest_file(working_directory, name, mod_name)
 
@@ -120,8 +123,9 @@ def _create_operator_template(
             f"Unsupported definition type: {definition_type} for dbgpts type: "
             f"{dbgpts_type}"
         )
+    # Create project structure
+    _create_project_structure(working_directory, name, base_metadata.get("description"))
 
-    _create_poetry_project(working_directory, name)
     _write_dbgpts_toml(working_directory, name, json_dict)
     _write_operator_init_file(working_directory, name, mod_name)
     _write_manifest_file(working_directory, name, mod_name)
@@ -146,7 +150,9 @@ def _create_agent_template(
             f"{dbgpts_type}"
         )
 
-    _create_poetry_project(working_directory, name)
+    # Create project structure
+    _create_project_structure(working_directory, name, base_metadata.get("description"))
+
     _write_dbgpts_toml(working_directory, name, json_dict)
     _write_agent_init_file(working_directory, name, mod_name)
     _write_manifest_file(working_directory, name, mod_name)
@@ -171,17 +177,122 @@ def _create_resource_template(
             f"{dbgpts_type}"
         )
 
-    _create_poetry_project(working_directory, name)
+    # Create project structure
+    _create_project_structure(working_directory, name, base_metadata.get("description"))
+
     _write_dbgpts_toml(working_directory, name, json_dict)
     _write_resource_init_file(working_directory, name, mod_name)
     _write_manifest_file(working_directory, name, mod_name)
 
 
-def _create_poetry_project(working_directory: str, name: str):
-    """Create a new poetry project"""
+def _create_project_structure(
+    working_directory: str, name: str, description: str = None
+):
+    """Create a new project using uv, poetry or manual file creation
+
+    Args:
+        working_directory (str): Directory to create the project in
+        name (str): Name of the project
+        description (str, optional): Project description
+
+    Returns:
+        bool: True if project created successfully
+    """
     os.chdir(working_directory)
-    subprocess.run(["poetry", "new", name, "-n"], check=True)
+
+    # Try uv first
+    if shutil.which("uv"):
+        try:
+            cmd = ["uv", "init", "--no-workspace"]
+
+            if description:
+                cmd.extend(["--description", description])
+
+            cmd.append(name)
+
+            subprocess.run(cmd, check=True)
+            return True
+        except subprocess.CalledProcessError:
+            click.echo("Warning: Failed to create project with uv, trying poetry...")
+
+    # Try poetry next
+    if shutil.which("poetry"):
+        try:
+            subprocess.run(["poetry", "new", name, "-n"], check=True)
+
+            # If description provided, update pyproject.toml
+            if description:
+                pyproject_path = Path(working_directory) / name / "pyproject.toml"
+                if pyproject_path.exists():
+                    _update_pyproject_description(pyproject_path, description)
+
+            return True
+        except subprocess.CalledProcessError:
+            click.echo(
+                "Warning: Failed to create project with poetry, creating files "
+                "manually..."
+            )
+
+    # Manual creation as fallback
+    project_dir = Path(working_directory) / name
+    project_dir.mkdir(parents=True, exist_ok=True)
+
+    # Create basic project structure
+    _create_manual_project_structure(project_dir, name, description)
+
+    return True
+
+
+def _update_pyproject_description(pyproject_path, description):
+    """Update description in pyproject.toml file"""
+    try:
+        import tomlkit
+
+        with open(pyproject_path, "r") as f:
+            pyproject = tomlkit.parse(f.read())
+
+        if "tool" in pyproject and "poetry" in pyproject["tool"]:
+            pyproject["tool"]["poetry"]["description"] = description
+        elif "project" in pyproject:
+            pyproject["project"]["description"] = description
+
+        with open(pyproject_path, "w") as f:
+            f.write(tomlkit.dumps(pyproject))
+    except Exception as e:
+        click.echo(f"Warning: Failed to update description in pyproject.toml: {e}")
+
+
+def _create_manual_project_structure(project_dir, name, description=None):
+    """Create manual project structure with necessary files"""
+    mod_name = name.replace("-", "_")
+
+    # Create module directory
+    module_dir = project_dir / mod_name
+    module_dir.mkdir(parents=True, exist_ok=True)
+
+    # Create __init__.py
+    with open(module_dir / "__init__.py", "w") as f:
+        f.write(f'"""Main module for {name}."""\n\n')
+
+    # Create pyproject.toml
+    pyproject_content = f"""[project]
+name = "{name}"
+version = "0.1.0"
+description = "{description or f"A {name} package"}"
+readme = "README.md"
+requires-python = ">=3.8"
+dependencies = []
+
+[build-system]
+requires = ["setuptools>=61.0"]
+build-backend = "setuptools.build_meta"
+"""
+    with open(project_dir / "pyproject.toml", "w") as f:
+        f.write(pyproject_content)
+
+    # Create README.md
+    with open(project_dir / "README.md", "w") as f:
+        f.write(f"# {name}\n\n{description or f'A {name} package'}\n")
 
 
 def _write_dbgpts_toml(working_directory: str, name: str, json_data: dict):
@@ -120,7 +120,7 @@ def get_device_info() -> Tuple[str, str, str, int, str]:
             device_count = torch.cuda.device_count()
         elif torch.backends.mps.is_available():
             device = "mps"
-    except ModuleNotFoundError:
+    except Exception:
         pass
 
     if not device_version:
@@ -20,80 +20,82 @@ def _generate_dbgpts_zip(package_name: str, flow: ServeRequest) -> io.BytesIO:
     flow_label = flow.label
     flow_description = flow.description
     dag_json = json.dumps(flow.flow_data.dict(), indent=4, ensure_ascii=False)
 
     with zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False) as zip_file:
+        # Create MANIFEST.in
         manifest = f"include dbgpts.toml\ninclude {flow_name}/definition/*.json"
-        readme = f"# {flow_label}\n\n{flow_description}"
         zip_file.writestr(f"{package_name}/MANIFEST.in", manifest)
+
+        # Create README.md
+        readme = f"# {flow_label}\n\n{flow_description}"
         zip_file.writestr(f"{package_name}/README.md", readme)
+
+        # Create module __init__.py
         zip_file.writestr(
             f"{package_name}/{flow_name}/__init__.py",
             "",
         )
+
+        # Create flow definition json
         zip_file.writestr(
             f"{package_name}/{flow_name}/definition/flow_definition.json",
             dag_json,
         )
+
+        # Create dbgpts.toml
         dbgpts_toml = tomlkit.document()
         # Add flow information
         dbgpts_flow_toml = tomlkit.document()
-        dbgpts_flow_toml.add("label", "Simple Streaming Chat")
-        name_with_comment = tomlkit.string("awel_flow_simple_streaming_chat")
+        dbgpts_flow_toml.add("label", flow_label)
+        name_with_comment = tomlkit.string(flow_name)
         name_with_comment.comment("A unique name for all dbgpts")
         dbgpts_flow_toml.add("name", name_with_comment)
 
         dbgpts_flow_toml.add("version", "0.1.0")
         dbgpts_flow_toml.add(
             "description",
             flow_description,
         )
         dbgpts_flow_toml.add("authors", [])
 
         definition_type_with_comment = tomlkit.string("json")
         definition_type_with_comment.comment("How to define the flow, python or json")
         dbgpts_flow_toml.add("definition_type", definition_type_with_comment)
 
         dbgpts_toml.add("flow", dbgpts_flow_toml)
 
         # Add python and json config
         python_config = tomlkit.table()
         dbgpts_toml.add("python_config", python_config)
 
         json_config = tomlkit.table()
         json_config.add("file_path", "definition/flow_definition.json")
         json_config.comment("Json config")
 
         dbgpts_toml.add("json_config", json_config)
 
-        # Transform to string
+        # Transform dbgpts.toml to string
        toml_string = tomlkit.dumps(dbgpts_toml)
         zip_file.writestr(f"{package_name}/dbgpts.toml", toml_string)
 
+        # Create pyproject.toml (uv style)
         pyproject_toml = tomlkit.document()
 
-        # Add [tool.poetry] section
-        tool_poetry_toml = tomlkit.table()
-        tool_poetry_toml.add("name", package_name)
-        tool_poetry_toml.add("version", "0.1.0")
-        tool_poetry_toml.add("description", "A dbgpts package")
-        tool_poetry_toml.add("authors", [])
-        tool_poetry_toml.add("readme", "README.md")
-        pyproject_toml["tool"] = tomlkit.table()
-        pyproject_toml["tool"]["poetry"] = tool_poetry_toml
-
-        # Add [tool.poetry.dependencies] section
-        dependencies = tomlkit.table()
-        dependencies.add("python", "^3.10")
-        pyproject_toml["tool"]["poetry"]["dependencies"] = dependencies
+        # Add [project] section (modern PEP 621 format used by uv)
+        project_section = tomlkit.table()
+        project_section.add("name", package_name)
+        project_section.add("version", "0.1.0")
+        project_section.add("description", flow_description or "A dbgpts package")
+        project_section.add("readme", "README.md")
+        project_section.add("requires-python", ">=3.8")
+        project_section.add("dependencies", [])
+        pyproject_toml["project"] = project_section
 
         # Add [build-system] section
         build_system = tomlkit.table()
-        build_system.add("requires", ["poetry-core"])
-        build_system.add("build-backend", "poetry.core.masonry.api")
+        build_system.add("requires", ["setuptools>=61.0"])
+        build_system.add("build-backend", "setuptools.build_meta")
         pyproject_toml["build-system"] = build_system
 
         # Transform to string
         pyproject_toml_string = tomlkit.dumps(pyproject_toml)
         zip_file.writestr(f"{package_name}/pyproject.toml", pyproject_toml_string)
 
     zip_buffer.seek(0)
     return zip_buffer
uv.lock
@@ -1799,6 +1799,7 @@ cache = [
     { name = "rocksdict" },
 ]
 dbgpts = [
+    { name = "build" },
     { name = "poetry" },
 ]
 observability = [
@@ -1810,6 +1811,7 @@ observability = [
 [package.metadata]
 requires-dist = [
     { name = "aiofiles" },
+    { name = "build", marker = "extra == 'dbgpts'" },
     { name = "dbgpt", editable = "packages/dbgpt-core" },
     { name = "dbgpt", extras = ["client", "cli", "agent", "simple-framework", "framework", "code"], marker = "extra == 'base'", editable = "packages/dbgpt-core" },
     { name = "dbgpt-acc-auto", virtual = "packages/dbgpt-accelerator/dbgpt-acc-auto" },