fix(awel): Fix AWEL flow error (#2461)

This commit is contained in:
Fangyin Cheng 2025-03-14 09:18:20 +08:00 committed by GitHub
parent ccfce5d279
commit 676d8ec70f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 7552 additions and 7164 deletions

View File

@ -194,6 +194,7 @@ class HOKnowledgeOperator(MapOperator[str, HOContextBody]):
space_id=space.id,
top_k=self._top_k,
rerank=reranker,
system_app=self.system_app,
)
async def map(self, query: str) -> HOContextBody:

View File

@ -90,6 +90,7 @@ def _get_type_cls(type_name: str) -> Type[Any]:
if type_name in _TYPE_REGISTRY:
return _TYPE_REGISTRY[type_name]
# Not registered, try to get the new class name from the compat config.
new_cls = get_new_class_name(type_name)
if new_cls and new_cls in _TYPE_REGISTRY:
return _TYPE_REGISTRY[new_cls]
@ -1521,4 +1522,6 @@ def _register_resource(
alias_ids: Optional[List[str]] = None,
):
"""Register the operator."""
# Register the type
_ = _get_type_name(cls)
_OPERATOR_REGISTRY.register_flow(cls, resource_metadata, alias_ids)

View File

@ -90,6 +90,9 @@ def _register(
def _register_flow_compat(
    curr_version: str, last_support_version: str, metadata: FlowCompatMetadata
):
    """Record a compatibility entry for *metadata*, keyed by its type name.

    The bare class name (e.g. ``DefaultLLMOperator``) is used as the key
    because a class may be moved between packages across versions
    (``dbgpt.core`` -> ``dbgpt_ext``) while keeping the same name.
    """
    compat_entry = _FlowCompat(curr_version, last_support_version, metadata)
    _TYPE_NAME_TO_COMPAT_METADATA[metadata.type_name].append(compat_entry)

View File

@ -5,7 +5,7 @@ import logging
import uuid
from contextlib import suppress
from enum import Enum
from typing import Any, Dict, List, Literal, Optional, Type, Union, cast
from typing import Any, Callable, Dict, List, Literal, Optional, Type, Union, cast
from typing_extensions import Annotated
@ -1013,7 +1013,14 @@ def _build_mapper_operators(dag: DAG, mappers: List[str]) -> List[DAGNode]:
return tasks
def fill_flow_panel(flow_panel: FlowPanel):
def fill_flow_panel(
flow_panel: FlowPanel,
metadata_func: Callable[
[Union[ViewMetadata, ResourceMetadata]], Union[ViewMetadata, ResourceMetadata]
] = None,
ignore_options_error: bool = False,
update_id: bool = False,
):
"""Fill the flow panel with the latest metadata.
Args:
@ -1021,14 +1028,19 @@ def fill_flow_panel(flow_panel: FlowPanel):
"""
if not flow_panel.flow_data:
return
id_mapping = {}
for node in flow_panel.flow_data.nodes:
try:
parameters_map = {}
if node.data.is_operator:
data = cast(ViewMetadata, node.data)
key = data.get_operator_key()
operator_cls: Type[DAGNode] = _get_operator_class(key)
metadata = operator_cls.metadata
metadata = None
if metadata_func:
metadata = metadata_func(data)
if not metadata:
key = data.get_operator_key()
operator_cls: Type[DAGNode] = _get_operator_class(key)
metadata = operator_cls.metadata
if not metadata:
raise ValueError("Metadata is not set.")
input_parameters = {p.name: p for p in metadata.inputs}
@ -1036,6 +1048,8 @@ def fill_flow_panel(flow_panel: FlowPanel):
for i in node.data.inputs:
if i.name in input_parameters:
new_param = input_parameters[i.name]
i.type_name = new_param.type_name
i.type_cls = new_param.type_cls
i.label = new_param.label
i.description = new_param.description
i.dynamic = new_param.dynamic
@ -1045,6 +1059,8 @@ def fill_flow_panel(flow_panel: FlowPanel):
for i in node.data.outputs:
if i.name in output_parameters:
new_param = output_parameters[i.name]
i.type_name = new_param.type_name
i.type_cls = new_param.type_cls
i.label = new_param.label
i.description = new_param.description
i.dynamic = new_param.dynamic
@ -1053,13 +1069,27 @@ def fill_flow_panel(flow_panel: FlowPanel):
i.mappers = new_param.mappers
else:
data = cast(ResourceMetadata, node.data)
key = data.get_origin_id()
metadata = _get_resource_class(key).metadata
metadata = None
if metadata_func:
metadata = metadata_func(data)
if not metadata:
key = data.get_origin_id()
metadata = _get_resource_class(key).metadata
for param in metadata.parameters:
parameters_map[param.name] = param
# Update the latest metadata.
if node.data.type_cls != metadata.type_cls:
old_type_cls = node.data.type_cls
node.data.type_cls = metadata.type_cls
node.data.type_name = metadata.type_name
if not node.data.is_operator and update_id:
# Update key
old_id = data.id
new_id = old_id.replace(old_type_cls, metadata.type_cls)
data.id = new_id
id_mapping[old_id] = new_id
node.data.label = metadata.label
node.data.description = metadata.description
node.data.category = metadata.category
@ -1072,7 +1102,16 @@ def fill_flow_panel(flow_panel: FlowPanel):
new_param = parameters_map[param.name]
param.label = new_param.label
param.description = new_param.description
param.options = new_param.get_dict_options() # type: ignore
try:
param.options = new_param.get_dict_options() # type: ignore
except Exception as e:
if ignore_options_error:
logger.warning(
f"Unable to fill the options for the parameter: {e}"
)
else:
raise
param.type_cls = new_param.type_cls
param.default = new_param.default
param.placeholder = new_param.placeholder
param.alias = new_param.alias
@ -1080,3 +1119,13 @@ def fill_flow_panel(flow_panel: FlowPanel):
except (FlowException, ValueError) as e:
logger.warning(f"Unable to fill the flow panel: {e}")
if not update_id:
return
for edge in flow_panel.flow_data.edges:
for old_id, new_id in id_mapping.items():
edge.source.replace(old_id, new_id)
edge.target.replace(old_id, new_id)
edge.source_handle.replace(old_id, new_id)
edge.target_handle.replace(old_id, new_id)

View File

@ -1,5 +1,7 @@
import copy
import json
import os
from typing import Optional
from typing import List, Optional, Tuple, Union
import click
@ -60,14 +62,24 @@ def tool_flow_cli_group():
"by one minor version."
),
)
@click.option(
"--update-template",
is_flag=True,
default=False,
required=False,
help=_("Update the template file."),
)
def gen_compat(
module: Optional[str],
output: Optional[str],
curr_version: Optional[str],
last_support_version: Optional[str],
update_template: bool = False,
):
"""Generate the compatibility flow mapping file."""
from ._module import _scan_awel_flow
from dbgpt.util.cli._module import _scan_awel_flow
lang = os.getenv("LANGUAGE")
modules = []
if module:
@ -96,10 +108,108 @@ def gen_compat(
os.makedirs(output, exist_ok=True)
output_file = os.path.join(output, curr_version + "_compat_flow.json")
user_input = clog.ask(f"Output to {output_file}, do you want to continue?(y/n)")
save_compat = True
if not user_input or user_input.lower() != "y":
clog.info("Cancelled")
save_compat = False
if save_compat:
with open(output_file, "w") as f:
import json
json.dump(output_dicts, f, ensure_ascii=False, indent=4)
if update_template:
_update_template(output_dicts, lang=lang)
def _update_template(
    compat_data: dict, template_file: Optional[str] = None, lang: str = "en"
):
    """Upgrade flow template files using the compatibility mapping.

    Registers every compat entry from *compat_data*, loads the templates
    (either the single *template_file* or all bundled templates for *lang*),
    refreshes each flow panel against the current metadata and, after user
    confirmation, rewrites the upgraded templates in place.

    Args:
        compat_data: Dict with ``curr_version``, ``last_support_version``
            and a ``compat`` list of flow compat metadata dicts.
        template_file: Optional single template JSON file to upgrade; when
            omitted, all bundled templates for *lang* are processed.
        lang: Language of the bundled templates to load.
    """
    from dbgpt.core.awel.dag.base import DAGNode
    from dbgpt.core.awel.flow.base import (
        _TYPE_REGISTRY,
    )
    from dbgpt.core.awel.flow.compat import (
        FlowCompatMetadata,
        _register_flow_compat,
        get_new_class_name,
    )
    from dbgpt.core.awel.flow.flow_factory import (
        ResourceMetadata,
        ViewMetadata,
        fill_flow_panel,
    )
    from dbgpt_serve.flow.api.schemas import ServerResponse
    from dbgpt_serve.flow.service.service import (
        _get_flow_templates_from_files,
        _parse_flow_template_from_json,
    )

    last_support_version = compat_data["last_support_version"]
    curr_version = compat_data["curr_version"]

    # Make the old->new class mapping visible to get_new_class_name().
    for data in compat_data["compat"]:
        metadata = FlowCompatMetadata(**data)
        _register_flow_compat(curr_version, last_support_version, metadata)

    templates: List[Tuple[str, ServerResponse]] = []
    if template_file:
        with open(template_file, "r") as f:
            data = json.load(f)
        templates.append((template_file, _parse_flow_template_from_json(data)))
    else:
        templates.extend(_get_flow_templates_from_files(lang))

    def metadata_func(old_metadata: Union[ViewMetadata, ResourceMetadata]):
        """Resolve the up-to-date metadata for a (possibly relocated) type.

        Returns None when the type is still registered under its old name,
        or when no replacement can be found, so the caller falls back to
        the default resolution.
        """
        type_cls = old_metadata.type_cls
        if type_cls in _TYPE_REGISTRY:
            return None
        new_type_cls = None
        try:
            new_type_cls = get_new_class_name(type_cls)
            if not new_type_cls or new_type_cls not in _TYPE_REGISTRY:
                return None
            obj_type = _TYPE_REGISTRY[new_type_cls]
            if isinstance(old_metadata, ViewMetadata):
                # The registry stores classes, so the check must be
                # issubclass; the original's isinstance(obj_type, DAGNode)
                # was always False for a class, making this branch dead.
                if not (
                    isinstance(obj_type, type) and issubclass(obj_type, DAGNode)
                ):
                    return None
                # Return the operator metadata (the original evaluated the
                # attribute but never returned it, yielding None).
                return obj_type.metadata
            elif isinstance(old_metadata, ResourceMetadata):
                metadata_attr = f"_resource_metadata_{obj_type.__name__}"
                return getattr(obj_type, metadata_attr)
            else:
                raise ValueError(f"Unknown metadata type: {type(old_metadata)}")
        except Exception as e:
            clog.warning(
                f"Error get metadata for {type_cls}: {str(e)}, new_type_cls: "
                f"{new_type_cls}"
            )
            return None

    new_templates: List[Tuple[str, ServerResponse]] = []
    for tmpl_path, template in templates:
        new_flow_template = copy.deepcopy(template)
        try:
            fill_flow_panel(
                new_flow_template,
                metadata_func,
                ignore_options_error=True,
                update_id=True,
            )
            new_templates.append((tmpl_path, new_flow_template))
        except Exception as e:
            import traceback

            traceback.print_exc()
            clog.warning(f"Error fill flow panel for {tmpl_path}: {str(e)}")

    user_input = clog.ask("Do you want to update the template file?(y/n)")
    if not user_input or user_input.lower() != "y":
        clog.info("Cancelled")
        return
    # NOTE(review): the original also re-dumped ``output_dicts`` to
    # ``output_file`` here, but neither name is defined in this function
    # (they are locals of gen_compat) and that write would raise NameError
    # at runtime; the compat file is already written by gen_compat, so the
    # stray write is dropped.
    for tmpl_path, flow in new_templates:
        template_dict = {"flow": flow.model_dump()}
        dag_json = json.dumps(template_dict, indent=4, ensure_ascii=False)
        with open(tmpl_path, "w") as f:
            f.write(dag_json)

View File

@ -60,7 +60,7 @@ class VisChart(Vis):
"""Return the prompt for the vis protocol."""
return default_chart_type_prompt()
async def generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
def sync_generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
"""Generate the parameters required by the vis protocol."""
chart = kwargs.get("chart", None)
data_df = kwargs.get("data_df", None)

View File

@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
class VisDashboard(Vis):
"""Dashboard Vis Protocol."""
async def generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
def sync_generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
"""Generate the parameters required by the vis protocol."""
charts: Optional[dict] = kwargs.get("charts", None)
title: Optional[str] = kwargs.get("title", None)

View File

@ -149,14 +149,51 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.siliconflow.SiliconFlowDeployModelParameters",
"type_name": "SiliconFlowDeployModelParameters",
"name": "SiliconFlowDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.siliconflow.SiliconFlowDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAICompatibleDeployModelParameters",
"type_name": "OpenAICompatibleDeployModelParameters",
"name": "OpenAICompatibleDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAICompatibleDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -170,7 +207,7 @@
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"category": "llm_client",
"parameters": [
"apk_key",
"api_key",
"api_base"
],
"outputs": [],
@ -179,13 +216,124 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.zhipu.ZhipuDeployModelParameters",
"type_name": "ZhipuDeployModelParameters",
"name": "ZhipuDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.zhipu.ZhipuDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.moonshot.MoonshotDeployModelParameters",
"type_name": "MoonshotDeployModelParameters",
"name": "MoonshotDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.moonshot.MoonshotDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.gitee.GiteeDeployModelParameters",
"type_name": "GiteeDeployModelParameters",
"name": "GiteeDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.gitee.GiteeDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.deepseek.DeepSeekDeployModelParameters",
"type_name": "DeepSeekDeployModelParameters",
"name": "DeepSeekDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.deepseek.DeepSeekDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.ollama.OllamaDeployModelParameters",
"type_name": "OllamaDeployModelParameters",
"name": "OllamaDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.ollama.OllamaDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base"
],
"outputs": [],
@ -194,14 +342,25 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.yi.YiDeployModelParameters",
"type_name": "YiDeployModelParameters",
"name": "YiDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.yi.YiDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -209,14 +368,25 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.spark.SparkDeployModelParameters",
"type_name": "SparkDeployModelParameters",
"name": "SparkDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.spark.SparkDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -224,14 +394,25 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.baichuan.BaichuanDeployModelParameters",
"type_name": "BaichuanDeployModelParameters",
"name": "BaichuanDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.baichuan.BaichuanDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -239,14 +420,126 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.gemini.GeminiDeployModelParameters",
"type_name": "GeminiDeployModelParameters",
"name": "GeminiDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.gemini.GeminiDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.tongyi.TongyiDeployModelParameters",
"type_name": "TongyiDeployModelParameters",
"name": "TongyiDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.tongyi.TongyiDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.volcengine.VolcengineDeployModelParameters",
"type_name": "VolcengineDeployModelParameters",
"name": "VolcengineDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.volcengine.VolcengineDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.wenxin.WenxinDeployModelParameters",
"type_name": "WenxinDeployModelParameters",
"name": "WenxinDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.wenxin.WenxinDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_key",
"api_secret"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.claude.ClaudeDeployModelParameters",
"type_name": "ClaudeDeployModelParameters",
"name": "ClaudeDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.claude.ClaudeDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -491,20 +784,6 @@
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.rag.text_splitter.text_splitter.CharacterTextSplitter",
"type_name": "CharacterTextSplitter",
"name": "character_text_splitter",
"id": "resource_dbgpt.rag.text_splitter.text_splitter.CharacterTextSplitter",
"category": "rag",
"parameters": [
"separator"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.rag.text_splitter.text_splitter.RecursiveCharacterTextSplitter",
@ -1658,6 +1937,7 @@
"port",
"user",
"database",
"engine",
"password",
"http_pool_maxsize",
"http_pool_num_pools",
@ -1708,29 +1988,6 @@
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_ext.storage.vector_store.elastic_store.ElasticsearchVectorConfig",
"type_name": "ElasticsearchVectorConfig",
"name": "elasticsearch_vector_config",
"id": "resource_dbgpt_ext.storage.vector_store.elastic_store.ElasticsearchVectorConfig",
"category": "vector_store",
"parameters": [
"name",
"user",
"password",
"embedding_fn",
"max_chunks_once_load",
"max_threads",
"uri",
"port",
"alias",
"index_name"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph",
@ -2014,6 +2271,89 @@
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.dbgpts.my.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.dbgpts.my.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.dbgpts.hub.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.dbgpts.hub.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.model.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.model.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"model_storage"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.evaluate.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.evaluate.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"embedding_model",
"similarity_top_k"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.rag.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.rag.config.ServeConfig",
"category": "rag",
"parameters": [
"api_keys",
"embedding_model",
"rerank_model",
"chunk_size",
"chunk_overlap",
"similarity_top_k",
"similarity_score_threshold",
"query_rewrite",
"max_chunks_once_load",
"max_threads",
"rerank_top_k"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "operator",
"type_cls": "dbgpt_serve.rag.operators.knowledge_space.KnowledgeSpacePromptBuilderOperator",
@ -2054,6 +2394,86 @@
],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.datasource.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.datasource.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.flow.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.flow.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"load_dbgpts_interval",
"encrypt_key"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.file.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.file.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"check_hash",
"host",
"port",
"download_chunk_size",
"save_chunk_size",
"transfer_chunk_size",
"transfer_timeout",
"local_storage_path"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.feedback.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.feedback.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.libro.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.libro.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "operator",
"type_cls": "dbgpt_serve.conversation.operators.DefaultServePreChatHistoryLoadOperator",
@ -2089,6 +2509,37 @@
],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.conversation.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.conversation.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"default_model"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.prompt.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.prompt.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"default_user",
"default_sys_code"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "operator",
"type_cls": "dbgpt.core.interface.output_parser.BaseOutputParser",

View File

@ -1,7 +1,7 @@
import json
import logging
import os
from typing import AsyncIterator, List, Optional, cast
from typing import AsyncIterator, List, Optional, Tuple, cast
import schedule
from fastapi import HTTPException
@ -424,29 +424,13 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]):
Returns:
List[ServerResponse]: The response
"""
local_file_templates = self._get_flow_templates_from_files()
return PaginationResult.build_from_all(local_file_templates, page, page_size)
def _get_flow_templates_from_files(self) -> List[ServerResponse]:
"""Get a list of Flow templates from files"""
user_lang = self._system_app.config.get_current_lang(default="en")
# List files in current directory
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
template_dir = os.path.join(parent_dir, "templates", user_lang)
default_template_dir = os.path.join(parent_dir, "templates", "en")
if not os.path.exists(template_dir):
template_dir = default_template_dir
local_file_templates = _get_flow_templates_from_files(user_lang)
templates = []
for root, _, files in os.walk(template_dir):
for file in files:
if file.endswith(".json"):
try:
with open(os.path.join(root, file), "r") as f:
data = json.load(f)
templates.append(_parse_flow_template_from_json(data))
except Exception as e:
logger.warning(f"Load template {file} error: {str(e)}")
return templates
for _, t in local_file_templates:
fill_flow_panel(t)
templates.append(t)
return PaginationResult.build_from_all(templates, page, page_size)
async def chat_stream_flow_str(
self, flow_uid: str, request: CommonLLMHttpRequestBody
@ -734,3 +718,29 @@ def _parse_flow_template_from_json(json_dict: dict) -> ServerResponse:
flow_json["state"] = State.INITIALIZING
flow_json["dag_id"] = None
return ServerResponse(**flow_json)
def _get_flow_templates_from_files(
    user_lang: str = "en",
) -> List[Tuple[str, ServerResponse]]:
    """Load the bundled flow templates for *user_lang* from disk.

    Falls back to the English template directory when no directory exists
    for the requested language. Returns (file path, parsed template)
    pairs; files that fail to load or parse are skipped with a warning.
    """
    base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    lang_dir = os.path.join(base_dir, "templates", user_lang)
    fallback_dir = os.path.join(base_dir, "templates", "en")
    if not os.path.exists(lang_dir):
        lang_dir = fallback_dir

    results: List[Tuple[str, ServerResponse]] = []
    for root, _, files in os.walk(lang_dir):
        for file in files:
            if not file.endswith(".json"):
                continue
            full_path = os.path.join(root, file)
            try:
                with open(full_path, "r") as fp:
                    parsed = _parse_flow_template_from_json(json.load(fp))
            except Exception as e:
                logger.warning(f"Load template {file} error: {str(e)}")
                continue
            results.append((full_path, parsed))
    return results

View File

@ -1,20 +1,20 @@
{
"flow": {
"uid": "fa8f01ab-fc7b-4e35-9533-f8e4cf045cce",
"uid": "3b6a2c55-0c3b-4e38-bd71-d68104cccf37",
"label": "Hybrid Knowledge Process Workflow",
"name": "hybrid_knowledge_process_workflow",
"flow_category": null,
"description": "hybrid_knowledge_process_workflow",
"state": "running",
"state": "initializing",
"error_message": "",
"source": "DBGPT-WEB",
"source_url": null,
"version": "0.1.1",
"define_type": "json",
"editable": true,
"editable": false,
"user_name": null,
"sys_code": null,
"dag_id": "flow_dag_knowledge_factory_workflow_fa8f01ab-fc7b-4e35-9533-f8e4cf045cce",
"dag_id": null,
"gmt_created": "2024-12-14 15:57:44",
"gmt_modified": "2024-12-14 15:57:44",
"metadata": null,
@ -165,14 +165,14 @@
"id": "operator_knowledge_process_join_operator___$$___rag___$$___v1_0",
"position": {
"x": 604.5,
"y": 77,
"zoom": 0
"y": 77.0,
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": 604.5,
"y": 77,
"zoom": 0
"y": 77.0,
"zoom": 0.0
},
"data": {
"label": "Knowledge Process Join Operator",
@ -231,7 +231,7 @@
],
"version": "v1",
"type_name": "KnowledgeProcessJoinOperator",
"type_cls": "dbgpt.rag.operators.process_branch.KnowledgeProcessJoinOperator",
"type_cls": "dbgpt_ext.rag.operators.process_branch.KnowledgeProcessJoinOperator",
"parameters": []
}
},
@ -242,13 +242,13 @@
"position": {
"x": 174.16583729394188,
"y": -131.63401513480102,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": 174.16583729394188,
"y": -131.63401513480102,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Vector Storage Operator",
@ -295,7 +295,7 @@
],
"version": "v1",
"type_name": "VectorStorageOperator",
"type_cls": "dbgpt.rag.operators.vector_store.VectorStorageOperator",
"type_cls": "dbgpt_ext.rag.operators.vector_store.VectorStorageOperator",
"parameters": [
{
"type_name": "VectorStoreBase",
@ -324,13 +324,13 @@
"position": {
"x": 180.89103763027083,
"y": 341.37174185366564,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": 180.89103763027083,
"y": 341.37174185366564,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Knowledge Graph Operator",
@ -377,7 +377,7 @@
],
"version": "v1",
"type_name": "KnowledgeGraphOperator",
"type_cls": "dbgpt.rag.operators.knowledge_graph.KnowledgeGraphOperator",
"type_cls": "dbgpt_ext.rag.operators.knowledge_graph.KnowledgeGraphOperator",
"parameters": [
{
"type_name": "KnowledgeGraphBase",
@ -406,13 +406,13 @@
"position": {
"x": -1056.545824254249,
"y": -163.01828337100258,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1056.545824254249,
"y": -163.01828337100258,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Chunk Manager Operator",
@ -463,7 +463,7 @@
"parameters": [
{
"type_name": "ChunkParameters",
"type_cls": "dbgpt.rag.chunk_manager.ChunkParameters",
"type_cls": "dbgpt_ext.rag.chunk_manager.ChunkParameters",
"dynamic": false,
"dynamic_minimum": 0,
"label": "Chunk Split Parameters",
@ -488,17 +488,17 @@
"position": {
"x": -306.391788536534,
"y": 491.0961943850906,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -306.391788536534,
"y": 491.0961943850906,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "BuiltinKnowledgeGraph",
"type_cls": "dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph",
"label": "Builtin Knowledge Graph",
"custom_label": null,
"name": "builtin_knowledge_graph",
@ -508,7 +508,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph_0",
"id": "resource_dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph_0",
"tags": {
"ui_version": "flow2.0"
},
@ -521,7 +521,7 @@
"parameters": [
{
"type_name": "BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"dynamic": false,
"dynamic_minimum": 0,
"label": "Builtin Knowledge Graph Config.",
@ -546,17 +546,17 @@
"position": {
"x": -813.7432345424928,
"y": 328.2957752754239,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -813.7432345424928,
"y": 328.2957752754239,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"label": "Builtin Graph Config",
"custom_label": null,
"name": "knowledge_graph_config",
@ -566,7 +566,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig_0",
"id": "resource_dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig_0",
"tags": {
"ui_version": "flow2.0"
},
@ -707,17 +707,17 @@
"position": {
"x": -251.7635173402225,
"y": -367.01602690631285,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -251.7635173402225,
"y": -367.01602690631285,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "ChromaStore",
"type_cls": "dbgpt.storage.vector_store.chroma_store.ChromaStore",
"type_cls": "dbgpt_ext.storage.vector_store.chroma_store.ChromaStore",
"label": "Chroma Vector Store",
"custom_label": null,
"name": "chroma_vector_store",
@ -727,7 +727,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.vector_store.chroma_store.ChromaStore_0",
"id": "resource_dbgpt_ext.storage.vector_store.chroma_store.ChromaStore_0",
"tags": {
"ui_version": "flow2.0"
},
@ -740,7 +740,7 @@
"parameters": [
{
"type_name": "ChromaVectorConfig",
"type_cls": "dbgpt.storage.vector_store.chroma_store.ChromaVectorConfig",
"type_cls": "dbgpt_ext.storage.vector_store.chroma_store.ChromaVectorConfig",
"dynamic": false,
"dynamic_minimum": 0,
"label": "Chroma Config",
@ -765,17 +765,17 @@
"position": {
"x": -684.4180723107158,
"y": -934.1745886033843,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -684.4180723107158,
"y": -934.1745886033843,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "ChromaVectorConfig",
"type_cls": "dbgpt.storage.vector_store.chroma_store.ChromaVectorConfig",
"type_cls": "dbgpt_ext.storage.vector_store.chroma_store.ChromaVectorConfig",
"label": "Chroma Config",
"custom_label": null,
"name": "chroma_vector_config",
@ -785,7 +785,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.vector_store.chroma_store.ChromaVectorConfig_0",
"id": "resource_dbgpt_ext.storage.vector_store.chroma_store.ChromaVectorConfig_0",
"tags": {
"ui_version": "flow2.0"
},
@ -926,13 +926,13 @@
"position": {
"x": -1558.679801266548,
"y": -149.61064934406164,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1558.679801266548,
"y": -149.61064934406164,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Knowledge Loader Operator",
@ -979,7 +979,7 @@
],
"version": "v1",
"type_name": "KnowledgeOperator",
"type_cls": "dbgpt.rag.operators.knowledge.KnowledgeOperator",
"type_cls": "dbgpt_ext.rag.operators.knowledge.KnowledgeOperator",
"parameters": [
{
"type_name": "str",
@ -1044,13 +1044,13 @@
"position": {
"x": -2015.3280350941911,
"y": -603.9181210010445,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -2015.3280350941911,
"y": -603.9181210010445,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Dict Http Trigger",
@ -1211,13 +1211,13 @@
"position": {
"x": -1112.4932879687394,
"y": -753.8648984316667,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1112.4932879687394,
"y": -753.8648984316667,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "DefaultEmbeddings",
@ -1250,13 +1250,13 @@
"position": {
"x": -1350.535131696419,
"y": 435.2340305150391,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1350.535131696419,
"y": 435.2340305150391,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "DefaultLLMClient",
@ -1307,13 +1307,13 @@
"position": {
"x": -614.4846931519926,
"y": -92.5163519851338,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -614.4846931519926,
"y": -92.5163519851338,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Knowledge Process Branch Operator",
@ -1384,11 +1384,11 @@
],
"version": "v1",
"type_name": "KnowledgeProcessBranchOperator",
"type_cls": "dbgpt.rag.operators.process_branch.KnowledgeProcessBranchOperator",
"type_cls": "dbgpt_ext.rag.operators.process_branch.KnowledgeProcessBranchOperator",
"parameters": []
}
}
]
}
}
}
}