fix(awel): Fix AWEL flow error (#2461)

This commit is contained in:
Fangyin Cheng 2025-03-14 09:18:20 +08:00 committed by GitHub
parent ccfce5d279
commit 676d8ec70f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 7552 additions and 7164 deletions

View File

@ -194,6 +194,7 @@ class HOKnowledgeOperator(MapOperator[str, HOContextBody]):
space_id=space.id,
top_k=self._top_k,
rerank=reranker,
system_app=self.system_app,
)
async def map(self, query: str) -> HOContextBody:

View File

@ -90,6 +90,7 @@ def _get_type_cls(type_name: str) -> Type[Any]:
if type_name in _TYPE_REGISTRY:
return _TYPE_REGISTRY[type_name]
# Not registered, try to get the new class name from the compat config.
new_cls = get_new_class_name(type_name)
if new_cls and new_cls in _TYPE_REGISTRY:
return _TYPE_REGISTRY[new_cls]
@ -1521,4 +1522,6 @@ def _register_resource(
alias_ids: Optional[List[str]] = None,
):
"""Register the operator."""
# Register the type
_ = _get_type_name(cls)
_OPERATOR_REGISTRY.register_flow(cls, resource_metadata, alias_ids)

View File

@ -90,6 +90,9 @@ def _register(
def _register_flow_compat(
    curr_version: str, last_support_version: str, metadata: FlowCompatMetadata
):
    """Record a compatibility entry for a flow type.

    Entries are keyed by the bare type name rather than the fully qualified
    class path: a class such as ``dbgpt.core.DefaultLLMOperator`` may later be
    moved to ``dbgpt_ext.DefaultLLMOperator``, and the bare name
    ``DefaultLLMOperator`` is what stays stable across such refactors.
    """
    compat_entry = _FlowCompat(curr_version, last_support_version, metadata)
    _TYPE_NAME_TO_COMPAT_METADATA[metadata.type_name].append(compat_entry)

View File

@ -5,7 +5,7 @@ import logging
import uuid
from contextlib import suppress
from enum import Enum
from typing import Any, Dict, List, Literal, Optional, Type, Union, cast
from typing import Any, Callable, Dict, List, Literal, Optional, Type, Union, cast
from typing_extensions import Annotated
@ -1013,7 +1013,14 @@ def _build_mapper_operators(dag: DAG, mappers: List[str]) -> List[DAGNode]:
return tasks
def fill_flow_panel(flow_panel: FlowPanel):
def fill_flow_panel(
flow_panel: FlowPanel,
metadata_func: Callable[
[Union[ViewMetadata, ResourceMetadata]], Union[ViewMetadata, ResourceMetadata]
] = None,
ignore_options_error: bool = False,
update_id: bool = False,
):
"""Fill the flow panel with the latest metadata.
Args:
@ -1021,11 +1028,16 @@ def fill_flow_panel(flow_panel: FlowPanel):
"""
if not flow_panel.flow_data:
return
id_mapping = {}
for node in flow_panel.flow_data.nodes:
try:
parameters_map = {}
if node.data.is_operator:
data = cast(ViewMetadata, node.data)
metadata = None
if metadata_func:
metadata = metadata_func(data)
if not metadata:
key = data.get_operator_key()
operator_cls: Type[DAGNode] = _get_operator_class(key)
metadata = operator_cls.metadata
@ -1036,6 +1048,8 @@ def fill_flow_panel(flow_panel: FlowPanel):
for i in node.data.inputs:
if i.name in input_parameters:
new_param = input_parameters[i.name]
i.type_name = new_param.type_name
i.type_cls = new_param.type_cls
i.label = new_param.label
i.description = new_param.description
i.dynamic = new_param.dynamic
@ -1045,6 +1059,8 @@ def fill_flow_panel(flow_panel: FlowPanel):
for i in node.data.outputs:
if i.name in output_parameters:
new_param = output_parameters[i.name]
i.type_name = new_param.type_name
i.type_cls = new_param.type_cls
i.label = new_param.label
i.description = new_param.description
i.dynamic = new_param.dynamic
@ -1053,6 +1069,10 @@ def fill_flow_panel(flow_panel: FlowPanel):
i.mappers = new_param.mappers
else:
data = cast(ResourceMetadata, node.data)
metadata = None
if metadata_func:
metadata = metadata_func(data)
if not metadata:
key = data.get_origin_id()
metadata = _get_resource_class(key).metadata
@ -1060,6 +1080,16 @@ def fill_flow_panel(flow_panel: FlowPanel):
parameters_map[param.name] = param
# Update the latest metadata.
if node.data.type_cls != metadata.type_cls:
old_type_cls = node.data.type_cls
node.data.type_cls = metadata.type_cls
node.data.type_name = metadata.type_name
if not node.data.is_operator and update_id:
# Update key
old_id = data.id
new_id = old_id.replace(old_type_cls, metadata.type_cls)
data.id = new_id
id_mapping[old_id] = new_id
node.data.label = metadata.label
node.data.description = metadata.description
node.data.category = metadata.category
@ -1072,7 +1102,16 @@ def fill_flow_panel(flow_panel: FlowPanel):
new_param = parameters_map[param.name]
param.label = new_param.label
param.description = new_param.description
try:
param.options = new_param.get_dict_options() # type: ignore
except Exception as e:
if ignore_options_error:
logger.warning(
f"Unable to fill the options for the parameter: {e}"
)
else:
raise
param.type_cls = new_param.type_cls
param.default = new_param.default
param.placeholder = new_param.placeholder
param.alias = new_param.alias
@ -1080,3 +1119,13 @@ def fill_flow_panel(flow_panel: FlowPanel):
except (FlowException, ValueError) as e:
logger.warning(f"Unable to fill the flow panel: {e}")
if not update_id:
return
for edge in flow_panel.flow_data.edges:
for old_id, new_id in id_mapping.items():
edge.source.replace(old_id, new_id)
edge.target.replace(old_id, new_id)
edge.source_handle.replace(old_id, new_id)
edge.target_handle.replace(old_id, new_id)

View File

@ -1,5 +1,7 @@
import copy
import json
import os
from typing import Optional
from typing import List, Optional, Tuple, Union
import click
@ -60,14 +62,24 @@ def tool_flow_cli_group():
"by one minor version."
),
)
@click.option(
"--update-template",
is_flag=True,
default=False,
required=False,
help=_("Update the template file."),
)
def gen_compat(
module: Optional[str],
output: Optional[str],
curr_version: Optional[str],
last_support_version: Optional[str],
update_template: bool = False,
):
"""Generate the compatibility flow mapping file."""
from ._module import _scan_awel_flow
from dbgpt.util.cli._module import _scan_awel_flow
lang = os.getenv("LANGUAGE")
modules = []
if module:
@ -96,10 +108,108 @@ def gen_compat(
os.makedirs(output, exist_ok=True)
output_file = os.path.join(output, curr_version + "_compat_flow.json")
user_input = clog.ask(f"Output to {output_file}, do you want to continue?(y/n)")
save_compat = True
if not user_input or user_input.lower() != "y":
clog.info("Cancelled")
return
save_compat = False
if save_compat:
with open(output_file, "w") as f:
import json
json.dump(output_dicts, f, ensure_ascii=False, indent=4)
if update_template:
_update_template(output_dicts, lang=lang)
def _update_template(
    compat_data: dict, template_file: Optional[str] = None, lang: str = "en"
):
    """Apply a compatibility mapping to the bundled flow template files.

    Registers every compat record from ``compat_data``, loads either the
    single ``template_file`` or all packaged templates for ``lang``, upgrades
    each template's metadata and ids via ``fill_flow_panel``, then — after an
    interactive confirmation — writes the upgraded templates back to disk.

    Args:
        compat_data: Mapping with ``curr_version``, ``last_support_version``
            and a ``compat`` list of flow compat metadata dicts.
        template_file: Optional path to a single template JSON file; when
            omitted, all template files for ``lang`` are processed.
        lang: Language of the packaged templates to update.
    """
    from dbgpt.core.awel.dag.base import DAGNode
    from dbgpt.core.awel.flow.base import (
        _TYPE_REGISTRY,
    )
    from dbgpt.core.awel.flow.compat import (
        FlowCompatMetadata,
        _register_flow_compat,
        get_new_class_name,
    )
    from dbgpt.core.awel.flow.flow_factory import (
        ResourceMetadata,
        ViewMetadata,
        fill_flow_panel,
    )
    from dbgpt_serve.flow.api.schemas import ServerResponse
    from dbgpt_serve.flow.service.service import (
        _get_flow_templates_from_files,
        _parse_flow_template_from_json,
    )

    last_support_version = compat_data["last_support_version"]
    curr_version = compat_data["curr_version"]

    # Make the compat records visible to get_new_class_name/fill_flow_panel.
    for compat_dict in compat_data["compat"]:
        metadata = FlowCompatMetadata(**compat_dict)
        _register_flow_compat(curr_version, last_support_version, metadata)

    templates: List[Tuple[str, ServerResponse]] = []
    if template_file:
        with open(template_file, "r") as f:
            data = json.load(f)
        templates.append((template_file, _parse_flow_template_from_json(data)))
    else:
        templates.extend(_get_flow_templates_from_files(lang))

    new_templates: List[Tuple[str, ServerResponse]] = []

    def metadata_func(old_metadata: Union[ViewMetadata, ResourceMetadata]):
        """Resolve the up-to-date metadata for an outdated type, or None."""
        type_cls = old_metadata.type_cls
        if type_cls in _TYPE_REGISTRY:
            # Still resolvable under its current name; no compat needed.
            return None
        new_type_cls = None
        try:
            new_type_cls = get_new_class_name(type_cls)
            if not new_type_cls or new_type_cls not in _TYPE_REGISTRY:
                return None
            obj_type = _TYPE_REGISTRY[new_type_cls]
            if isinstance(old_metadata, ViewMetadata):
                # BUGFIX: obj_type is a class, so the membership test must be
                # issubclass — isinstance(cls, DAGNode) is always False, which
                # made this branch reject every operator.
                if not (isinstance(obj_type, type) and issubclass(obj_type, DAGNode)):
                    return None
                # BUGFIX: the original evaluated obj_type.metadata without
                # returning it, so operators never received new metadata.
                return obj_type.metadata
            elif isinstance(old_metadata, ResourceMetadata):
                metadata_attr = f"_resource_metadata_{obj_type.__name__}"
                return getattr(obj_type, metadata_attr)
            else:
                raise ValueError(f"Unknown metadata type: {type(old_metadata)}")
        except Exception as e:
            clog.warning(
                f"Error get metadata for {type_cls}: {str(e)}, new_type_cls: "
                f"{new_type_cls}"
            )
            return None

    # Upgrade each template; failures are reported and the template skipped.
    # (Loop variable renamed from the original to avoid shadowing the
    # `template_file` parameter.)
    for file_path, template in templates:
        new_flow_template = copy.deepcopy(template)
        try:
            fill_flow_panel(
                new_flow_template,
                metadata_func,
                ignore_options_error=True,
                update_id=True,
            )
            new_templates.append((file_path, new_flow_template))
        except Exception as e:
            import traceback

            traceback.print_exc()
            clog.warning(f"Error fill flow panel for {file_path}: {str(e)}")

    user_input = clog.ask("Do you want to update the template file?(y/n)")
    if not user_input or user_input.lower() != "y":
        clog.info("Cancelled")
        return
    for file_path, flow in new_templates:
        template_dict = {"flow": flow.model_dump()}
        dag_json = json.dumps(template_dict, indent=4, ensure_ascii=False)
        with open(file_path, "w") as f:
            f.write(dag_json)

View File

@ -60,7 +60,7 @@ class VisChart(Vis):
"""Return the prompt for the vis protocol."""
return default_chart_type_prompt()
async def generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
def sync_generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
"""Generate the parameters required by the vis protocol."""
chart = kwargs.get("chart", None)
data_df = kwargs.get("data_df", None)

View File

@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
class VisDashboard(Vis):
"""Dashboard Vis Protocol."""
async def generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
def sync_generate_param(self, **kwargs) -> Optional[Dict[str, Any]]:
"""Generate the parameters required by the vis protocol."""
charts: Optional[dict] = kwargs.get("charts", None)
title: Optional[str] = kwargs.get("title", None)

View File

@ -149,14 +149,51 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.siliconflow.SiliconFlowDeployModelParameters",
"type_name": "SiliconFlowDeployModelParameters",
"name": "SiliconFlowDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.siliconflow.SiliconFlowDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAICompatibleDeployModelParameters",
"type_name": "OpenAICompatibleDeployModelParameters",
"name": "OpenAICompatibleDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAICompatibleDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -170,7 +207,7 @@
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"category": "llm_client",
"parameters": [
"apk_key",
"api_key",
"api_base"
],
"outputs": [],
@ -179,13 +216,124 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.zhipu.ZhipuDeployModelParameters",
"type_name": "ZhipuDeployModelParameters",
"name": "ZhipuDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.zhipu.ZhipuDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.moonshot.MoonshotDeployModelParameters",
"type_name": "MoonshotDeployModelParameters",
"name": "MoonshotDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.moonshot.MoonshotDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.gitee.GiteeDeployModelParameters",
"type_name": "GiteeDeployModelParameters",
"name": "GiteeDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.gitee.GiteeDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.deepseek.DeepSeekDeployModelParameters",
"type_name": "DeepSeekDeployModelParameters",
"name": "DeepSeekDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.deepseek.DeepSeekDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.ollama.OllamaDeployModelParameters",
"type_name": "OllamaDeployModelParameters",
"name": "OllamaDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.ollama.OllamaDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base"
],
"outputs": [],
@ -194,14 +342,25 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.yi.YiDeployModelParameters",
"type_name": "YiDeployModelParameters",
"name": "YiDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.yi.YiDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -209,14 +368,25 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.spark.SparkDeployModelParameters",
"type_name": "SparkDeployModelParameters",
"name": "SparkDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.spark.SparkDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -224,14 +394,25 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.baichuan.BaichuanDeployModelParameters",
"type_name": "BaichuanDeployModelParameters",
"name": "BaichuanDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.baichuan.BaichuanDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -239,14 +420,126 @@
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_name": "OpenAILLMClient",
"name": "openai_llm_client",
"id": "resource_dbgpt.model.proxy.llms.chatgpt.OpenAILLMClient",
"type_cls": "dbgpt.model.proxy.llms.gemini.GeminiDeployModelParameters",
"type_name": "GeminiDeployModelParameters",
"name": "GeminiDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.gemini.GeminiDeployModelParameters",
"category": "llm_client",
"parameters": [
"apk_key",
"api_base"
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.tongyi.TongyiDeployModelParameters",
"type_name": "TongyiDeployModelParameters",
"name": "TongyiDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.tongyi.TongyiDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.volcengine.VolcengineDeployModelParameters",
"type_name": "VolcengineDeployModelParameters",
"name": "VolcengineDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.volcengine.VolcengineDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.wenxin.WenxinDeployModelParameters",
"type_name": "WenxinDeployModelParameters",
"name": "WenxinDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.wenxin.WenxinDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_key",
"api_secret"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.model.proxy.llms.claude.ClaudeDeployModelParameters",
"type_name": "ClaudeDeployModelParameters",
"name": "ClaudeDeployModelParameters",
"id": "resource_dbgpt.model.proxy.llms.claude.ClaudeDeployModelParameters",
"category": "llm_client",
"parameters": [
"name",
"backend",
"provider",
"verbose",
"concurrency",
"prompt_template",
"context_length",
"reasoning_model",
"api_base",
"api_key",
"api_type",
"api_version",
"http_proxy"
],
"outputs": [],
"inputs": [],
@ -491,20 +784,6 @@
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.rag.text_splitter.text_splitter.CharacterTextSplitter",
"type_name": "CharacterTextSplitter",
"name": "character_text_splitter",
"id": "resource_dbgpt.rag.text_splitter.text_splitter.CharacterTextSplitter",
"category": "rag",
"parameters": [
"separator"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt.rag.text_splitter.text_splitter.RecursiveCharacterTextSplitter",
@ -1658,6 +1937,7 @@
"port",
"user",
"database",
"engine",
"password",
"http_pool_maxsize",
"http_pool_num_pools",
@ -1708,29 +1988,6 @@
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_ext.storage.vector_store.elastic_store.ElasticsearchVectorConfig",
"type_name": "ElasticsearchVectorConfig",
"name": "elasticsearch_vector_config",
"id": "resource_dbgpt_ext.storage.vector_store.elastic_store.ElasticsearchVectorConfig",
"category": "vector_store",
"parameters": [
"name",
"user",
"password",
"embedding_fn",
"max_chunks_once_load",
"max_threads",
"uri",
"port",
"alias",
"index_name"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph",
@ -2014,6 +2271,89 @@
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.dbgpts.my.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.dbgpts.my.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.dbgpts.hub.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.dbgpts.hub.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.model.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.model.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"model_storage"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.evaluate.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.evaluate.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"embedding_model",
"similarity_top_k"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.rag.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.rag.config.ServeConfig",
"category": "rag",
"parameters": [
"api_keys",
"embedding_model",
"rerank_model",
"chunk_size",
"chunk_overlap",
"similarity_top_k",
"similarity_score_threshold",
"query_rewrite",
"max_chunks_once_load",
"max_threads",
"rerank_top_k"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "operator",
"type_cls": "dbgpt_serve.rag.operators.knowledge_space.KnowledgeSpacePromptBuilderOperator",
@ -2054,6 +2394,86 @@
],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.datasource.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.datasource.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.flow.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.flow.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"load_dbgpts_interval",
"encrypt_key"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.file.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.file.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"check_hash",
"host",
"port",
"download_chunk_size",
"save_chunk_size",
"transfer_chunk_size",
"transfer_timeout",
"local_storage_path"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.feedback.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.feedback.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.libro.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.libro.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "operator",
"type_cls": "dbgpt_serve.conversation.operators.DefaultServePreChatHistoryLoadOperator",
@ -2089,6 +2509,37 @@
],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.conversation.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.conversation.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"default_model"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "resource",
"type_cls": "dbgpt_serve.prompt.config.ServeConfig",
"type_name": "ServeConfig",
"name": "ServeConfig",
"id": "resource_dbgpt_serve.prompt.config.ServeConfig",
"category": "common",
"parameters": [
"api_keys",
"default_user",
"default_sys_code"
],
"outputs": [],
"inputs": [],
"version": "v1"
},
{
"type": "operator",
"type_cls": "dbgpt.core.interface.output_parser.BaseOutputParser",

View File

@ -1,7 +1,7 @@
import json
import logging
import os
from typing import AsyncIterator, List, Optional, cast
from typing import AsyncIterator, List, Optional, Tuple, cast
import schedule
from fastapi import HTTPException
@ -424,29 +424,13 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]):
Returns:
List[ServerResponse]: The response
"""
local_file_templates = self._get_flow_templates_from_files()
return PaginationResult.build_from_all(local_file_templates, page, page_size)
def _get_flow_templates_from_files(self) -> List[ServerResponse]:
"""Get a list of Flow templates from files"""
user_lang = self._system_app.config.get_current_lang(default="en")
# List files in current directory
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
template_dir = os.path.join(parent_dir, "templates", user_lang)
default_template_dir = os.path.join(parent_dir, "templates", "en")
if not os.path.exists(template_dir):
template_dir = default_template_dir
local_file_templates = _get_flow_templates_from_files(user_lang)
templates = []
for root, _, files in os.walk(template_dir):
for file in files:
if file.endswith(".json"):
try:
with open(os.path.join(root, file), "r") as f:
data = json.load(f)
templates.append(_parse_flow_template_from_json(data))
except Exception as e:
logger.warning(f"Load template {file} error: {str(e)}")
return templates
for _, t in local_file_templates:
fill_flow_panel(t)
templates.append(t)
return PaginationResult.build_from_all(templates, page, page_size)
async def chat_stream_flow_str(
self, flow_uid: str, request: CommonLLMHttpRequestBody
@ -734,3 +718,29 @@ def _parse_flow_template_from_json(json_dict: dict) -> ServerResponse:
flow_json["state"] = State.INITIALIZING
flow_json["dag_id"] = None
return ServerResponse(**flow_json)
def _get_flow_templates_from_files(
    user_lang: str = "en",
) -> List[Tuple[str, ServerResponse]]:
    """Collect the flow templates bundled with this package.

    Scans ``templates/<user_lang>`` next to this module, falling back to the
    English templates when that directory does not exist, and parses every
    ``*.json`` file found. Files that fail to load are logged and skipped.

    Args:
        user_lang: Language directory to prefer under ``templates/``.

    Returns:
        List[Tuple[str, ServerResponse]]: ``(file_path, template)`` pairs.
    """
    base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    template_dir = os.path.join(base_dir, "templates", user_lang)
    if not os.path.exists(template_dir):
        # Fall back to the default English templates.
        template_dir = os.path.join(base_dir, "templates", "en")
    results: List[Tuple[str, ServerResponse]] = []
    for root, _, file_names in os.walk(template_dir):
        json_files = [name for name in file_names if name.endswith(".json")]
        for name in json_files:
            full_path = os.path.join(root, name)
            try:
                with open(full_path, "r") as f:
                    parsed = json.load(f)
                results.append((full_path, _parse_flow_template_from_json(parsed)))
            except Exception as e:
                logger.warning(f"Load template {name} error: {str(e)}")
    return results

View File

@ -1,20 +1,20 @@
{
"flow": {
"uid": "fa8f01ab-fc7b-4e35-9533-f8e4cf045cce",
"uid": "3b6a2c55-0c3b-4e38-bd71-d68104cccf37",
"label": "Hybrid Knowledge Process Workflow",
"name": "hybrid_knowledge_process_workflow",
"flow_category": null,
"description": "hybrid_knowledge_process_workflow",
"state": "running",
"state": "initializing",
"error_message": "",
"source": "DBGPT-WEB",
"source_url": null,
"version": "0.1.1",
"define_type": "json",
"editable": true,
"editable": false,
"user_name": null,
"sys_code": null,
"dag_id": "flow_dag_knowledge_factory_workflow_fa8f01ab-fc7b-4e35-9533-f8e4cf045cce",
"dag_id": null,
"gmt_created": "2024-12-14 15:57:44",
"gmt_modified": "2024-12-14 15:57:44",
"metadata": null,
@ -165,14 +165,14 @@
"id": "operator_knowledge_process_join_operator___$$___rag___$$___v1_0",
"position": {
"x": 604.5,
"y": 77,
"zoom": 0
"y": 77.0,
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": 604.5,
"y": 77,
"zoom": 0
"y": 77.0,
"zoom": 0.0
},
"data": {
"label": "Knowledge Process Join Operator",
@ -231,7 +231,7 @@
],
"version": "v1",
"type_name": "KnowledgeProcessJoinOperator",
"type_cls": "dbgpt.rag.operators.process_branch.KnowledgeProcessJoinOperator",
"type_cls": "dbgpt_ext.rag.operators.process_branch.KnowledgeProcessJoinOperator",
"parameters": []
}
},
@ -242,13 +242,13 @@
"position": {
"x": 174.16583729394188,
"y": -131.63401513480102,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": 174.16583729394188,
"y": -131.63401513480102,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Vector Storage Operator",
@ -295,7 +295,7 @@
],
"version": "v1",
"type_name": "VectorStorageOperator",
"type_cls": "dbgpt.rag.operators.vector_store.VectorStorageOperator",
"type_cls": "dbgpt_ext.rag.operators.vector_store.VectorStorageOperator",
"parameters": [
{
"type_name": "VectorStoreBase",
@ -324,13 +324,13 @@
"position": {
"x": 180.89103763027083,
"y": 341.37174185366564,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": 180.89103763027083,
"y": 341.37174185366564,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Knowledge Graph Operator",
@ -377,7 +377,7 @@
],
"version": "v1",
"type_name": "KnowledgeGraphOperator",
"type_cls": "dbgpt.rag.operators.knowledge_graph.KnowledgeGraphOperator",
"type_cls": "dbgpt_ext.rag.operators.knowledge_graph.KnowledgeGraphOperator",
"parameters": [
{
"type_name": "KnowledgeGraphBase",
@ -406,13 +406,13 @@
"position": {
"x": -1056.545824254249,
"y": -163.01828337100258,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1056.545824254249,
"y": -163.01828337100258,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Chunk Manager Operator",
@ -463,7 +463,7 @@
"parameters": [
{
"type_name": "ChunkParameters",
"type_cls": "dbgpt.rag.chunk_manager.ChunkParameters",
"type_cls": "dbgpt_ext.rag.chunk_manager.ChunkParameters",
"dynamic": false,
"dynamic_minimum": 0,
"label": "Chunk Split Parameters",
@ -488,17 +488,17 @@
"position": {
"x": -306.391788536534,
"y": 491.0961943850906,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -306.391788536534,
"y": 491.0961943850906,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "BuiltinKnowledgeGraph",
"type_cls": "dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph",
"label": "Builtin Knowledge Graph",
"custom_label": null,
"name": "builtin_knowledge_graph",
@ -508,7 +508,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph_0",
"id": "resource_dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraph_0",
"tags": {
"ui_version": "flow2.0"
},
@ -521,7 +521,7 @@
"parameters": [
{
"type_name": "BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"dynamic": false,
"dynamic_minimum": 0,
"label": "Builtin Knowledge Graph Config.",
@ -546,17 +546,17 @@
"position": {
"x": -813.7432345424928,
"y": 328.2957752754239,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -813.7432345424928,
"y": 328.2957752754239,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"type_cls": "dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig",
"label": "Builtin Graph Config",
"custom_label": null,
"name": "knowledge_graph_config",
@ -566,7 +566,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig_0",
"id": "resource_dbgpt_ext.storage.knowledge_graph.knowledge_graph.BuiltinKnowledgeGraphConfig_0",
"tags": {
"ui_version": "flow2.0"
},
@ -707,17 +707,17 @@
"position": {
"x": -251.7635173402225,
"y": -367.01602690631285,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -251.7635173402225,
"y": -367.01602690631285,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "ChromaStore",
"type_cls": "dbgpt.storage.vector_store.chroma_store.ChromaStore",
"type_cls": "dbgpt_ext.storage.vector_store.chroma_store.ChromaStore",
"label": "Chroma Vector Store",
"custom_label": null,
"name": "chroma_vector_store",
@ -727,7 +727,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.vector_store.chroma_store.ChromaStore_0",
"id": "resource_dbgpt_ext.storage.vector_store.chroma_store.ChromaStore_0",
"tags": {
"ui_version": "flow2.0"
},
@ -740,7 +740,7 @@
"parameters": [
{
"type_name": "ChromaVectorConfig",
"type_cls": "dbgpt.storage.vector_store.chroma_store.ChromaVectorConfig",
"type_cls": "dbgpt_ext.storage.vector_store.chroma_store.ChromaVectorConfig",
"dynamic": false,
"dynamic_minimum": 0,
"label": "Chroma Config",
@ -765,17 +765,17 @@
"position": {
"x": -684.4180723107158,
"y": -934.1745886033843,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -684.4180723107158,
"y": -934.1745886033843,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "ChromaVectorConfig",
"type_cls": "dbgpt.storage.vector_store.chroma_store.ChromaVectorConfig",
"type_cls": "dbgpt_ext.storage.vector_store.chroma_store.ChromaVectorConfig",
"label": "Chroma Config",
"custom_label": null,
"name": "chroma_vector_config",
@ -785,7 +785,7 @@
"flow_type": "resource",
"icon": null,
"documentation_url": null,
"id": "resource_dbgpt.storage.vector_store.chroma_store.ChromaVectorConfig_0",
"id": "resource_dbgpt_ext.storage.vector_store.chroma_store.ChromaVectorConfig_0",
"tags": {
"ui_version": "flow2.0"
},
@ -926,13 +926,13 @@
"position": {
"x": -1558.679801266548,
"y": -149.61064934406164,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1558.679801266548,
"y": -149.61064934406164,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Knowledge Loader Operator",
@ -979,7 +979,7 @@
],
"version": "v1",
"type_name": "KnowledgeOperator",
"type_cls": "dbgpt.rag.operators.knowledge.KnowledgeOperator",
"type_cls": "dbgpt_ext.rag.operators.knowledge.KnowledgeOperator",
"parameters": [
{
"type_name": "str",
@ -1044,13 +1044,13 @@
"position": {
"x": -2015.3280350941911,
"y": -603.9181210010445,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -2015.3280350941911,
"y": -603.9181210010445,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Dict Http Trigger",
@ -1211,13 +1211,13 @@
"position": {
"x": -1112.4932879687394,
"y": -753.8648984316667,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1112.4932879687394,
"y": -753.8648984316667,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "DefaultEmbeddings",
@ -1250,13 +1250,13 @@
"position": {
"x": -1350.535131696419,
"y": 435.2340305150391,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -1350.535131696419,
"y": 435.2340305150391,
"zoom": 0
"zoom": 0.0
},
"data": {
"type_name": "DefaultLLMClient",
@ -1307,13 +1307,13 @@
"position": {
"x": -614.4846931519926,
"y": -92.5163519851338,
"zoom": 0
"zoom": 0.0
},
"type": "customNode",
"position_absolute": {
"x": -614.4846931519926,
"y": -92.5163519851338,
"zoom": 0
"zoom": 0.0
},
"data": {
"label": "Knowledge Process Branch Operator",
@ -1384,7 +1384,7 @@
],
"version": "v1",
"type_name": "KnowledgeProcessBranchOperator",
"type_cls": "dbgpt.rag.operators.process_branch.KnowledgeProcessBranchOperator",
"type_cls": "dbgpt_ext.rag.operators.process_branch.KnowledgeProcessBranchOperator",
"parameters": []
}
}

View File

@ -1,36 +1,31 @@
{
"flow": {
"uid": "21eb87d5-b63a-4f41-b2aa-28d01033344d",
"uid": "33709cab-6fb8-4b5a-a637-d5022bb82ee5",
"label": "RAG Chat AWEL flow template",
"name": "rag_chat_awel_flow_template",
"flow_category": "chat_flow",
"description": "An example of a RAG chat AWEL flow.",
"state": "running",
"state": "initializing",
"error_message": "",
"source": "DBGPT-WEB",
"source_url": null,
"version": "0.1.1",
"define_type": "json",
"editable": true,
"editable": false,
"user_name": null,
"sys_code": null,
"dag_id": "flow_dag_rag_chat_awel_flow_template_21eb87d5-b63a-4f41-b2aa-28d01033344d",
"dag_id": null,
"gmt_created": "2024-08-30 10:48:56",
"gmt_modified": "2024-08-30 10:48:56",
"metadata": {
"sse_output": true,
"streaming_output": true,
"tags": {},
"triggers": [
{
"trigger_type": "http",
"path": "/api/v1/awel/trigger/templates/flow_dag_rag_chat_awel_flow_template_21eb87d5-b63a-4f41-b2aa-28d01033344d",
"methods": [
"POST"
],
"trigger_mode": "chat"
"trigger_type": "http"
}
]
],
"sse_output": true,
"streaming_output": true,
"tags": {}
},
"variables": null,
"authors": null,
@ -321,7 +316,7 @@
},
{
"type_name": "HOContextBody",
"type_cls": "dbgpt.app.operators.llm.HOContextBody",
"type_cls": "dbgpt_app.operators.llm.HOContextBody",
"label": "Extra Context",
"custom_label": null,
"name": "extra_context",
@ -348,7 +343,7 @@
],
"version": "v1",
"type_name": "HOStreamingLLMOperator",
"type_cls": "dbgpt.app.operators.llm.HOStreamingLLMOperator",
"type_cls": "dbgpt_app.operators.llm.HOStreamingLLMOperator",
"parameters": [
{
"type_name": "ChatPromptTemplate",
@ -548,10 +543,10 @@
"zoom": 0.0
},
"data": {
"label": "Knowledge Operator",
"label": "Knowledge Space Operator",
"custom_label": null,
"name": "higher_order_knowledge_operator",
"description": "Knowledge Operator, retrieve your knowledge(documents) from knowledge space",
"description": "Knowledge Space Operator, retrieve your knowledge from knowledge space",
"category": "rag",
"category_label": "RAG",
"flow_type": "operator",
@ -580,7 +575,7 @@
"outputs": [
{
"type_name": "HOContextBody",
"type_cls": "dbgpt.app.operators.llm.HOContextBody",
"type_cls": "dbgpt_app.operators.llm.HOContextBody",
"label": "Retrieved context",
"custom_label": null,
"name": "context",
@ -601,13 +596,13 @@
"dynamic_minimum": 0,
"is_list": true,
"mappers": [
"dbgpt.app.operators.rag.HOKnowledgeOperator.ChunkMapper"
"dbgpt_app.operators.rag.HOKnowledgeOperator.ChunkMapper"
]
}
],
"version": "v1",
"type_name": "HOKnowledgeOperator",
"type_cls": "dbgpt.app.operators.rag.HOKnowledgeOperator",
"type_cls": "dbgpt_app.operators.rag.HOKnowledgeOperator",
"parameters": [
{
"type_name": "str",

View File

@ -1,36 +1,31 @@
{
"flow": {
"uid": "f947dc6d-a6e6-4f02-b794-14989944173d",
"uid": "0eb98752-5621-41c6-bd80-f8fc7519d69e",
"label": "RAG Chat AWEL flow template",
"name": "rag_chat_awel_flow_template_zh",
"flow_category": "chat_flow",
"description": "知识库对话的 AWEL flow 模板",
"state": "running",
"state": "initializing",
"error_message": "",
"source": "DBGPT-WEB",
"source_url": null,
"version": "0.1.1",
"define_type": "json",
"editable": true,
"editable": false,
"user_name": null,
"sys_code": null,
"dag_id": "flow_dag_rag_chat_awel_flow_template_zh_f947dc6d-a6e6-4f02-b794-14989944173d",
"dag_id": null,
"gmt_created": "2024-09-06 17:12:50",
"gmt_modified": "2024-09-06 17:12:50",
"metadata": {
"sse_output": true,
"streaming_output": true,
"tags": {},
"triggers": [
{
"trigger_type": "http",
"path": "/api/v1/awel/trigger/example/flow_dag_rag_chat_awel_flow_template_zh_f947dc6d-a6e6-4f02-b794-14989944173d",
"methods": [
"POST"
],
"trigger_mode": "chat"
"trigger_type": "http"
}
]
],
"sse_output": true,
"streaming_output": true,
"tags": {}
},
"variables": null,
"authors": null,
@ -109,10 +104,10 @@
"zoom": 0.0
},
"data": {
"label": "常见 LLM Http 触发器",
"label": "通用 LLM HTTP 触发器",
"custom_label": null,
"name": "common_llm_http_trigger",
"description": "通过 HTTP 请求触发您的工作流,并将请求主体解析为常见的 LLM HTTP 主体",
"description": "通过 HTTP 请求触发工作流,并将请求体解析为通用 LLM HTTP 请求体",
"category": "trigger",
"category_label": "Trigger",
"flow_type": "operator",
@ -129,10 +124,10 @@
{
"type_name": "CommonLLMHttpRequestBody",
"type_cls": "dbgpt.core.awel.trigger.http_trigger.CommonLLMHttpRequestBody",
"label": "请求体",
"label": "Request Body",
"custom_label": null,
"name": "request_body",
"description": "API 端点的请求主体,解析为常见的 LLM HTTP 主体",
"description": "The request body of the API endpoint, parse as a common LLM http body",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": false,
@ -191,13 +186,13 @@
"value": null,
"options": [
{
"label": "HTTP PUT 方法",
"label": "HTTP Method PUT",
"name": "http_put",
"value": "PUT",
"children": null
},
{
"label": "HTTP POST 方法",
"label": "HTTP Method POST",
"name": "http_post",
"value": "POST",
"children": null
@ -217,7 +212,7 @@
"optional": true,
"default": false,
"placeholder": null,
"description": "响应是否为流式传输",
"description": "响应是否为流式",
"value": false,
"options": null
},
@ -226,7 +221,7 @@
"type_cls": "dbgpt.core.awel.trigger.http_trigger.BaseHttpBody",
"dynamic": false,
"dynamic_minimum": 0,
"label": "HTTP 响应体",
"label": "HTTP 响应体",
"name": "http_response_body",
"is_list": false,
"category": "resource",
@ -234,7 +229,7 @@
"optional": true,
"default": null,
"placeholder": null,
"description": "API 端点的响应体",
"description": "API 端点的响应体",
"value": null,
"options": null
},
@ -251,7 +246,7 @@
"optional": true,
"default": null,
"placeholder": null,
"description": "响应媒体类型",
"description": "响应媒体类型",
"value": null,
"options": null
},
@ -310,10 +305,10 @@
{
"type_name": "CommonLLMHttpRequestBody",
"type_cls": "dbgpt.core.awel.trigger.http_trigger.CommonLLMHttpRequestBody",
"label": "通用 LLM 请求体",
"label": "Common LLM Request Body",
"custom_label": null,
"name": "common_llm_request_body",
"description": "通用 LLM 请求体。",
"description": "The common LLM request body.",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": false,
@ -321,11 +316,11 @@
},
{
"type_name": "HOContextBody",
"type_cls": "dbgpt.app.operators.llm.HOContextBody",
"label": "额外上下文",
"type_cls": "dbgpt_app.operators.llm.HOContextBody",
"label": "Extra Context",
"custom_label": null,
"name": "extra_context",
"description": "用于构建提示的额外上下文(知识上下文、数据库架构等),您可以添加多个上下文。",
"description": "Extra context for building prompt(Knowledge context, database schema, etc), you can add multiple context.",
"dynamic": true,
"dynamic_minimum": 0,
"is_list": false,
@ -336,10 +331,10 @@
{
"type_name": "ModelOutput",
"type_cls": "dbgpt.core.interface.llm.ModelOutput",
"label": "流式模型输出",
"label": "Streaming Model Output",
"custom_label": null,
"name": "streaming_model_output",
"description": "流式模型输出。",
"description": "The streaming model output.",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": true,
@ -348,7 +343,7 @@
],
"version": "v1",
"type_name": "HOStreamingLLMOperator",
"type_cls": "dbgpt.app.operators.llm.HOStreamingLLMOperator",
"type_cls": "dbgpt_app.operators.llm.HOStreamingLLMOperator",
"parameters": [
{
"type_name": "ChatPromptTemplate",
@ -397,7 +392,7 @@
"optional": true,
"default": null,
"placeholder": null,
"description": "LLM 客户端,如何连接到 LLM 模型,如果未提供,将使用 DB-GPT 部署的默认客户端。",
"description": "LLM 客户端,如何连接到 LLM 模型,如果未提供,则使用由 DB-GPT 部署的默认客户端。",
"value": null,
"options": null
},
@ -414,7 +409,7 @@
"optional": true,
"default": "none",
"placeholder": null,
"description": "历史合并模式,支持 'none''window' 和 'token'。'none':不合并历史,'window':按对话窗口合并,'token':按 Token 长度合并。",
"description": "历史合并模式,支持 'none''window' 和 'token'。'none':不合并历史,'window':按对话窗口合并,'token':按 Token 长度合并。",
"value": null,
"options": [
{
@ -457,7 +452,7 @@
"optional": true,
"default": "user_input",
"placeholder": null,
"description": "提示中用户消息的键,默认为 'user_input'。",
"description": "提示中用户消息的键,默认为 'user_input'。",
"value": null,
"options": null
},
@ -474,7 +469,7 @@
"optional": true,
"default": null,
"placeholder": null,
"description": "聊天历史键,将聊天历史消息传递给提示模板,如果未提供,它将解析提示模板以获取键。",
"description": "聊天历史键,用于将聊天历史消息传递给提示模板,如果未提供,它将解析提示模板以获取键。",
"value": null,
"options": null
},
@ -483,7 +478,7 @@
"type_cls": "builtins.int",
"dynamic": false,
"dynamic_minimum": 0,
"label": "保留起始轮",
"label": "保留起始轮",
"name": "keep_start_rounds",
"is_list": false,
"category": "common",
@ -491,7 +486,7 @@
"optional": true,
"default": null,
"placeholder": null,
"description": "在聊天历史中保留的起始轮。",
"description": "在聊天历史中保留的起始轮。",
"value": null,
"options": null
},
@ -500,7 +495,7 @@
"type_cls": "builtins.int",
"dynamic": false,
"dynamic_minimum": 0,
"label": "保留结束轮",
"label": "保留结束轮",
"name": "keep_end_rounds",
"is_list": false,
"category": "common",
@ -508,7 +503,7 @@
"optional": true,
"default": null,
"placeholder": null,
"description": "在聊天历史中保留的结束轮。",
"description": "在聊天历史中保留的结束轮。",
"value": null,
"options": null
},
@ -525,7 +520,7 @@
"optional": true,
"default": 2048,
"placeholder": null,
"description": "在聊天历史中保留的最大 Token 限制。",
"description": "在聊天历史中保留的最大 Token 数量。",
"value": null,
"options": null
}
@ -548,10 +543,10 @@
"zoom": 0.0
},
"data": {
"label": "知识算子",
"label": "知识空间算子",
"custom_label": null,
"name": "higher_order_knowledge_operator",
"description": "知识算子,从知识空间检索您的知识(文档)",
"description": "知识空间算子,从知识空间检索知识",
"category": "rag",
"category_label": "RAG",
"flow_type": "operator",
@ -567,10 +562,10 @@
{
"type_name": "str",
"type_cls": "builtins.str",
"label": "用户问题",
"label": "User question",
"custom_label": null,
"name": "query",
"description": "用于检索知识的用户问题",
"description": "The user question to retrieve the knowledge",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": false,
@ -580,11 +575,11 @@
"outputs": [
{
"type_name": "HOContextBody",
"type_cls": "dbgpt.app.operators.llm.HOContextBody",
"label": "检索到的上下文",
"type_cls": "dbgpt_app.operators.llm.HOContextBody",
"label": "Retrieved context",
"custom_label": null,
"name": "context",
"description": "从知识空间检索到的上下文",
"description": "The retrieved context from the knowledge space",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": false,
@ -593,21 +588,21 @@
{
"type_name": "Chunk",
"type_cls": "dbgpt.core.interface.knowledge.Chunk",
"label": "",
"label": "Chunks",
"custom_label": null,
"name": "chunks",
"description": "从知识空间检索到的块",
"description": "The retrieved chunks from the knowledge space",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": true,
"mappers": [
"dbgpt.app.operators.rag.HOKnowledgeOperator.ChunkMapper"
"dbgpt_app.operators.rag.HOKnowledgeOperator.ChunkMapper"
]
}
],
"version": "v1",
"type_name": "HOKnowledgeOperator",
"type_cls": "dbgpt.app.operators.rag.HOKnowledgeOperator",
"type_cls": "dbgpt_app.operators.rag.HOKnowledgeOperator",
"parameters": [
{
"type_name": "str",
@ -790,7 +785,7 @@
"optional": true,
"default": "context",
"placeholder": null,
"description": "上下文的键,将用于构建提示",
"description": "上下文的键,将用于构建提示",
"value": null,
"options": null
},
@ -799,7 +794,7 @@
"type_cls": "builtins.int",
"dynamic": false,
"dynamic_minimum": 0,
"label": "前 K",
"label": "前 K",
"name": "top_k",
"is_list": false,
"category": "common",
@ -816,7 +811,7 @@
"type_cls": "builtins.float",
"dynamic": false,
"dynamic_minimum": 0,
"label": "最匹配分数",
"label": "最匹配分数",
"name": "score_threshold",
"is_list": false,
"category": "common",
@ -824,7 +819,7 @@
"optional": true,
"default": 0.3,
"placeholder": null,
"description": "检索到的块的最低匹配分数,如果匹配分数低于阈值,将被丢弃",
"description": "检索到的块的最小匹配分数,如果匹配分数低于阈值,则会被丢弃",
"value": null,
"options": null,
"ui": {
@ -846,7 +841,7 @@
"type_cls": "builtins.bool",
"dynamic": false,
"dynamic_minimum": 0,
"label": "启用重排序器",
"label": "是否启用重排序器",
"name": "reranker_enabled",
"is_list": false,
"category": "common",
@ -854,7 +849,7 @@
"optional": true,
"default": null,
"placeholder": null,
"description": "是否启用重排序器",
"description": "是否启用重排序器",
"value": null,
"options": null
},
@ -863,7 +858,7 @@
"type_cls": "builtins.int",
"dynamic": false,
"dynamic_minimum": 0,
"label": "重排序器前 K",
"label": "重排序器前 K",
"name": "reranker_top_k",
"is_list": false,
"category": "common",
@ -871,7 +866,7 @@
"optional": true,
"default": 3,
"placeholder": null,
"description": "重排序器的前 K",
"description": "重排序器的前 K",
"value": null,
"options": null
}
@ -896,10 +891,10 @@
"data": {
"type_name": "CommonChatPromptTemplate",
"type_cls": "dbgpt.core.interface.operators.prompt_operator.CommonChatPromptTemplate",
"label": "常见聊天提示模板",
"label": "通用聊天提示模板",
"custom_label": null,
"name": "common_chat_prompt_template",
"description": "用静态提示构建提示的原子。",
"description": "用于构建静态提示的算子。",
"category": "prompt",
"category_label": "Prompt",
"flow_type": "resource",
@ -1036,7 +1031,7 @@
"label": "OpenAI 流式输出算子",
"custom_label": null,
"name": "openai_streaming_output_operator",
"description": "OpenAI 流式 LLM 算子。",
"description": "OpenAI 流式大语言模型算子。",
"category": "output_parser",
"category_label": "Output Parser",
"flow_type": "operator",
@ -1052,10 +1047,10 @@
{
"type_name": "ModelOutput",
"type_cls": "dbgpt.core.interface.llm.ModelOutput",
"label": "上游模型输出",
"label": "Upstream Model Output",
"custom_label": null,
"name": "model_output",
"description": "上游模型的输出。",
"description": "The model output of upstream.",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": true,
@ -1066,10 +1061,10 @@
{
"type_name": "str",
"type_cls": "builtins.str",
"label": "模型输出",
"label": "Model Output",
"custom_label": null,
"name": "model_output",
"description": "转换为 OpenAI 流格式后的模型输出。",
"description": "The model output after transformed to openai stream format.",
"dynamic": false,
"dynamic_minimum": 0,
"is_list": true,