Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-09-12 20:53:48 +00:00)

ci(SDK): Add 0.7.0 workflow (#2493)
@@ -10,6 +10,13 @@ readme = "README.md"
 requires-python = ">=3.10"
 dependencies = []

 [build-system]
 requires = ["hatchling"]
 build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/dbgpt_acc_auto"]
+
+[project.urls]
+Homepage = "https://github.com/eosphoros-ai/DB-GPT"
+Documentation = "http://docs.dbgpt.cn/docs/overview"
@@ -0,0 +1,3 @@
+from ._version import version as __version__  # noqa: F401
+
+__ALL__ = ["__version__"]

@@ -0,0 +1 @@
+version = "0.7.0"
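These two new files follow the single-source-version pattern used throughout this commit: `_version.py` holds the version string and `__init__.py` re-exports it. A minimal consumer sketch (assuming the importable package name is dbgpt_acc_auto and the package is installed):

```python
# Sketch: read the version re-exported by the package's __init__.py.
import dbgpt_acc_auto

print(dbgpt_acc_auto.__version__)  # -> "0.7.0"
```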
@@ -2,12 +2,20 @@
 # https://github.com/astral-sh/uv/issues/2252#issuecomment-2624150395
 [project]
 name = "dbgpt-acc-flash-attn"
-version = "0.1.0"
+version = "0.7.0"
 description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.10"
 dependencies = []

+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/dbgpt_acc_flash_attn"]
+
 [dependency-groups]
 build = [
     "setuptools>=75.8.0",
@@ -0,0 +1,5 @@
+"""Flash Attention wrapper for DB-GPT."""
+
+from ._version import version as __version__  # noqa: F401
+
+__ALL__ = ["__version__"]

@@ -0,0 +1 @@
+version = "0.7.0"
@@ -3,3 +3,7 @@
 This package will not be uploaded to PyPI. So, your can't import it if some other
 package depends on it.
 """
+
+from ._version import version as __version__  # noqa: F401
+
+__ALL__ = ["__version__"]
packages/dbgpt-app/src/dbgpt_app/_version.py (new file, +1)
@@ -0,0 +1 @@
+version = "0.7.0"
@@ -1,5 +1,6 @@
 """This module is the client of the dbgpt package."""

+from ._version import version as __version__  # noqa: F401
 from .client import Client, ClientException  # noqa: F401

-__ALL__ = ["Client", "ClientException"]
+__ALL__ = ["Client", "ClientException", "__version__"]
packages/dbgpt-client/src/dbgpt_client/_version.py (new file, +1)
@@ -0,0 +1 @@
+version = "0.7.0"
@@ -1,15 +1,13 @@
 """DB-GPT: Next Generation Data Interaction Solution with LLMs."""

-from dbgpt import _version  # noqa: E402
 from dbgpt.component import BaseComponent, SystemApp  # noqa: F401

+from ._version import version as __version__  # noqa: F401
+
 _CORE_LIBS = ["core", "rag", "model", "agent", "datasource", "vis", "storage", "train"]
 _SERVE_LIBS = ["serve"]
 _LIBS = _CORE_LIBS + _SERVE_LIBS

-
-__version__ = _version.version
-
 __ALL__ = ["__version__", "SystemApp", "BaseComponent"]
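With the `from dbgpt import _version` indirection removed, `__version__` now binds directly from the package-local `_version.py`, matching every other package touched here. A quick consistency check, as a sketch (assuming both distributions are installed):

```python
# Sketch: the duplicated _version.py files keep workspace packages in lockstep.
import dbgpt
import dbgpt_client

assert dbgpt.__version__ == dbgpt_client.__version__ == "0.7.0"
```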
@@ -57,6 +57,44 @@ class MDXDocGenerator:
             link_url += filename[:-4]
         return link_url

+    def get_rel_link(self, cls: Type, doc_id: str, source_cls: Type = None) -> str:
+        """Generate a relative link from the source class to the target class.
+
+        Args:
+            cls: The target class to generate a link to
+            doc_id: The document ID of the target class
+            source_cls: The source class that will contain the link (optional)
+
+        Returns:
+            A relative URL path to the target class documentation
+        """
+        filename = self.generate_safe_filename(doc_id)
+        target_type, _ = self._parse_class_metadata(cls)
+
+        # If source_cls is not provided, return a simple path without relative
+        # navigation
+        if not source_cls:
+            if target_type:
+                return f"{target_type}/{filename[:-4]}"
+            return filename[:-4]
+
+        # Get the source class type to determine relative path
+        source_type, _ = self._parse_class_metadata(source_cls)
+
+        # Same type - link within the same directory
+        if source_type == target_type:
+            return filename[:-4]
+
+        # Different types - need to navigate up and then down
+        if source_type and target_type:
+            return f"../{target_type}/{filename[:-4]}"
+        elif source_type and not target_type:
+            return f"../{filename[:-4]}"
+        elif not source_type and target_type:
+            return f"{target_type}/{filename[:-4]}"
+        else:
+            return filename[:-4]
+
     def get_desc_for_class(self, cls: Type, default_desc: str = "") -> str:
         """Get the description for a class."""
         doc_id = self.get_class_doc_id(cls)
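Once a source class is given, get_rel_link reduces to four relative-path cases. A standalone sketch of just that branch logic, with the class-metadata lookup stubbed out (type names and stems here are hypothetical):

```python
# Mirrors the branch logic above; "stem" stands in for
# generate_safe_filename(doc_id)[:-4].
def rel_link(source_type, target_type, stem):
    if source_type == target_type:
        return stem                        # same directory
    if source_type and target_type:
        return f"../{target_type}/{stem}"  # up one level, into the target dir
    if source_type:
        return f"../{stem}"                # up to the root level
    if target_type:
        return f"{target_type}/{stem}"     # down into the target dir
    return stem

assert rel_link("llm", "llm", "openai_config") == "openai_config"
assert rel_link("llm", "embedding", "hf_config") == "../embedding/hf_config"
assert rel_link("llm", None, "misc_config") == "../misc_config"
assert rel_link(None, "llm", "openai_config") == "llm/openai_config"
```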
@@ -87,7 +125,10 @@ class MDXDocGenerator:
         return {"type": "code", "content": str(value)}

     def process_nested_fields(
-        self, nested_fields: Dict[str, List[ParameterDescription]], output_dir: Path
+        self,
+        nested_fields: Dict[str, List[ParameterDescription]],
+        output_dir: Path,
+        source_cls: Type,
     ) -> Tuple[List[Dict], List[str]]:
         """Handle nested fields in a parameter description."""
         links = []
@@ -105,7 +146,8 @@ class MDXDocGenerator:
             if doc_id not in self.processed_classes:
                 new_files = self.generate_class_doc(nested_cls, output_dir)
                 generated_files.extend(new_files)
-            link_url = self.get_abs_link(nested_cls, doc_id)
+            # Use relative link instead of absolute link
+            link_url = self.get_rel_link(nested_cls, doc_id, source_cls=source_cls)
             links.append(
                 {
                     "type": "link",
@@ -210,7 +252,7 @@ class MDXDocGenerator:
             # Handle nested fields
             if param.nested_fields:
                 nested_links, nested_files = self.process_nested_fields(
-                    param.nested_fields, output_dir
+                    param.nested_fields, output_dir, source_cls=cls
                 )
                 generated_files.extend(nested_files)
                 if nested_links:
@@ -393,15 +435,15 @@ class MDXDocGenerator:
             cfg_desc = self.get_desc_for_class(cls, cfg_desc)

             doc_id = self.get_class_doc_id(cls)
-            doc_link = self.get_abs_link(cls, doc_id)
-
+            # doc_link = self.get_abs_link(cls, doc_id)
+            doc_link = self.generate_safe_filename(doc_id)
             f_index.write("  {\n")
             f_index.write(f'    "name": "{cls_name}",\n')
             f_index.write(
                 f'    "description": {json.dumps(cfg_desc)},\n'
             )  # noqa
             if doc_link:
-                f_index.write(f'    "link": "{doc_link}"\n')
+                f_index.write(f'    "link": "./{doc_link[:-4]}"\n')
             else:
                 f_index.write('    "link": ""\n')
             f_index.write("  },\n")
@@ -446,7 +488,13 @@ class MDXDocGenerator:
                 # cfg_desc = cfg_desc.replace("`", "'")
                 doc_id = self.get_class_doc_id(cls)
                 if doc_id in self.link_cache:
-                    link_url = self.get_abs_link(cls, doc_id)
+                    # Use relative links based on the config type
+                    if cfg_type != "other":
+                        link_url = (
+                            f"{cfg_type}/{self.generate_safe_filename(doc_id)[:-4]}"
+                        )
+                    else:
+                        link_url = f"{self.generate_safe_filename(doc_id)[:-4]}"
                     f.write(f"| [{cls_name}]({link_url}) | {cfg_desc} |\n")
                 else:
                     f.write(f"| {cls_name} | {cfg_desc} |\n")
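Both link rewrites above lean on the same slice: generate_safe_filename evidently returns a filename ending in ".mdx", and `[:-4]` drops that extension so the emitted link targets the routed docs page rather than the raw file. A tiny sketch (the filename is hypothetical):

```python
# Hypothetical output of generate_safe_filename(doc_id):
doc_link = "openai_llm_config.mdx"

# Index-entry link as written by the new code: "./" plus the extension-less stem.
print(f'"link": "./{doc_link[:-4]}"')  # -> "link": "./openai_llm_config"
```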
@@ -1,2 +1,3 @@
-def hello() -> str:
-    return "Hello from dbgpt-integration!"
+from ._version import version as __version__  # noqa: F401
+
+__ALL__ = ["__version__"]
packages/dbgpt-ext/src/dbgpt_ext/_version.py (new file, +1)
@@ -0,0 +1 @@
+version = "0.7.0"
@@ -82,12 +82,10 @@ def _import_openspg() -> Tuple[Type, Type]:


 def _import_full_text() -> Tuple[Type, Type]:
-    from dbgpt_ext.storage.full_text.elasticsearch import (
-        ElasticDocumentConfig,
-        ElasticDocumentStore,
-    )
+    from dbgpt_ext.storage.full_text.elasticsearch import ElasticDocumentStore
+    from dbgpt_ext.storage.vector_store.elastic_store import ElasticsearchStoreConfig

-    return ElasticDocumentStore, ElasticDocumentConfig
+    return ElasticDocumentStore, ElasticsearchStoreConfig


 def _select_rag_storage(name: str) -> Tuple[Type, Type]:
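The corrected pairing returns the vector-store ElasticsearchStoreConfig alongside ElasticDocumentStore. Keeping the imports inside the helper preserves the lazy-import pattern, so the elasticsearch extras are only pulled in when this backend is chosen; a sketch of that dispatch style (the registry key is hypothetical):

```python
from typing import Callable, Dict, Tuple, Type

def _import_full_text() -> Tuple[Type, Type]:
    # Imports live inside the function so elasticsearch is only required
    # when the full-text backend is actually selected.
    from dbgpt_ext.storage.full_text.elasticsearch import ElasticDocumentStore
    from dbgpt_ext.storage.vector_store.elastic_store import ElasticsearchStoreConfig

    return ElasticDocumentStore, ElasticsearchStoreConfig

# Hypothetical registry in the style of _select_rag_storage:
_RAG_BACKENDS: Dict[str, Callable[[], Tuple[Type, Type]]] = {
    "full_text": _import_full_text,
}
```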
@@ -0,0 +1,3 @@
+from ._version import version as __version__  # noqa: F401
+
+__ALL__ = ["__version__"]
packages/dbgpt-serve/src/dbgpt_serve/_version.py (new file, +1)
@@ -0,0 +1 @@
+version = "0.7.0"
@@ -345,7 +345,15 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]):
             model_request = ModelRequest(**payload)

             async for output in llm_client.generate_stream(model_request.copy()):  # type: ignore
-                yield f"data:{output.text}\n\n"
+                text = ""
+                if output.has_thinking:
+                    text = output.thinking_text
+                    lines = text.split("\n")
+                    text = ">" + "\n>".join(lines)
+                if output.has_text:
+                    text += output.text
+                text = text.replace("\n", "\\n")
+                yield f"data:{text}\n\n"
             yield "data:[DONE]\n\n"
         except Exception as e:
             logger.error(f"Call LLMClient error, {str(e)}, detail: {payload}")
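The new streaming body renders any "thinking" output as a Markdown blockquote and escapes newlines so each SSE data: frame stays on one line. A minimal sketch of the transform on hypothetical chunk values:

```python
# Hypothetical chunk values; mirrors the transform in the hunk above.
thinking_text = "step 1\nstep 2"
answer_text = "Final answer"

text = ">" + "\n>".join(thinking_text.split("\n"))  # blockquote each thinking line
text += answer_text
text = text.replace("\n", "\\n")  # literal "\n" keeps the SSE frame single-line

print(f"data:{text}\n\n")
# data:>step 1\n>step 2Final answer
```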