feat(web): copy awel flow (#1200)
Co-authored-by: Fangyin Cheng <staneyffer@gmail.com>
parent 0837da48ba · commit 673ddaab5b
@@ -202,7 +202,7 @@ CREATE TABLE IF NOT EXISTS `prompt_manage`
     PRIMARY KEY (`id`),
     UNIQUE KEY `uk_gpts_conversations` (`conv_id`),
     KEY `idx_gpts_name` (`gpts_name`)
-) ENGINE=InnoDB AUTO_INCREMENT=26 DEFAULT CHARSET=utf8mb4 COMMENT="gpt conversations";
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpt conversations";

 CREATE TABLE IF NOT EXISTS `gpts_instance` (
     `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
@@ -222,7 +222,7 @@ CREATE TABLE IF NOT EXISTS `gpts_instance` (
     `is_sustainable` tinyint(1) NOT NULL COMMENT 'Applications for sustainable dialogue',
     PRIMARY KEY (`id`),
     UNIQUE KEY `uk_gpts` (`gpts_name`)
-) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8mb4 COMMENT="gpts instance";
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpts instance";

 CREATE TABLE `gpts_messages` (
     `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
@@ -241,7 +241,7 @@ CREATE TABLE `gpts_messages` (
     `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
     PRIMARY KEY (`id`),
     KEY `idx_q_messages` (`conv_id`,`rounds`,`sender`)
-) ENGINE=InnoDB AUTO_INCREMENT=100 DEFAULT CHARSET=utf8mb4 COMMENT="gpts message";
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpts message";


 CREATE TABLE `gpts_plans` (
@@ -262,7 +262,7 @@ CREATE TABLE `gpts_plans` (
     `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
     PRIMARY KEY (`id`),
     UNIQUE KEY `uk_sub_task` (`conv_id`,`sub_task_num`)
-) ENGINE=InnoDB AUTO_INCREMENT=19 DEFAULT CHARSET=utf8mb4 COMMENT="gpt plan";
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpt plan";

 -- dbgpt.dbgpt_serve_flow definition
 CREATE TABLE `dbgpt_serve_flow` (
@@ -290,7 +290,7 @@ CREATE TABLE `dbgpt_serve_flow` (
     KEY `ix_dbgpt_serve_flow_dag_id` (`dag_id`),
     KEY `ix_dbgpt_serve_flow_user_name` (`user_name`),
     KEY `ix_dbgpt_serve_flow_name` (`name`)
-) ENGINE=InnoDB AUTO_INCREMENT=15 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

 -- dbgpt.gpts_app definition
 CREATE TABLE `gpts_app` (
@@ -308,7 +308,7 @@ CREATE TABLE `gpts_app` (
     `icon` varchar(1024) DEFAULT NULL COMMENT 'app icon, url',
     PRIMARY KEY (`id`),
     UNIQUE KEY `uk_gpts_app` (`app_name`)
-) ENGINE=InnoDB AUTO_INCREMENT=39 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

 CREATE TABLE `gpts_app_collection` (
     `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
@@ -337,7 +337,7 @@ CREATE TABLE `gpts_app_detail` (
     `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
     PRIMARY KEY (`id`),
     UNIQUE KEY `uk_gpts_app_agent_node` (`app_name`,`agent_name`,`node_id`)
-) ENGINE=InnoDB AUTO_INCREMENT=23 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

 CREATE
 DATABASE IF NOT EXISTS EXAMPLE_1;
assets/schema/upgrade/v0_5_1/upgrade_to_v0.5.1.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
USE dbgpt;
ALTER TABLE dbgpt_serve_flow
    ADD COLUMN `error_message` varchar(512) null comment 'Error message' after `state`;
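Note: the upgrade is a single additive column. As a reference, here is a minimal, hedged sketch of applying it idempotently from Python; the connection URL and the helper name are placeholders, not part of this commit, and SQLAlchemy with a MySQL driver is assumed:

from sqlalchemy import create_engine, inspect, text

# Placeholder connection URL; adjust driver, credentials and host for your deployment.
engine = create_engine("mysql+pymysql://root:password@127.0.0.1:3306/dbgpt")

def ensure_error_message_column() -> None:
    """Apply the v0.5.1 upgrade only when the column is still missing."""
    columns = {c["name"] for c in inspect(engine).get_columns("dbgpt_serve_flow")}
    if "error_message" not in columns:
        with engine.begin() as conn:
            conn.execute(
                text(
                    "ALTER TABLE dbgpt_serve_flow "
                    "ADD COLUMN `error_message` varchar(512) NULL "
                    "COMMENT 'Error message' AFTER `state`"
                )
            )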
assets/schema/upgrade/v0_5_1/v0.5.0.sql (new file, 394 lines)
@@ -0,0 +1,394 @@
-- Full SQL of v0.5.0, please do not modify this file (it must be the same as the file in the release package)

CREATE
DATABASE IF NOT EXISTS dbgpt;
use dbgpt;

-- For alembic migration tool
CREATE TABLE IF NOT EXISTS `alembic_version`
(
    version_num VARCHAR(32) NOT NULL,
    CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num)
) DEFAULT CHARSET=utf8mb4 ;

CREATE TABLE IF NOT EXISTS `knowledge_space`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
    `name` varchar(100) NOT NULL COMMENT 'knowledge space name',
    `vector_type` varchar(50) NOT NULL COMMENT 'vector type',
    `desc` varchar(500) NOT NULL COMMENT 'description',
    `owner` varchar(100) DEFAULT NULL COMMENT 'owner',
    `context` TEXT DEFAULT NULL COMMENT 'context argument',
    `gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
    `gmt_modified` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
    PRIMARY KEY (`id`),
    KEY `idx_name` (`name`) COMMENT 'index:idx_name'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='knowledge space table';

CREATE TABLE IF NOT EXISTS `knowledge_document`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
    `doc_name` varchar(100) NOT NULL COMMENT 'document path name',
    `doc_type` varchar(50) NOT NULL COMMENT 'doc type',
    `space` varchar(50) NOT NULL COMMENT 'knowledge space',
    `chunk_size` int NOT NULL COMMENT 'chunk size',
    `last_sync` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'last sync time',
    `status` varchar(50) NOT NULL COMMENT 'status TODO,RUNNING,FAILED,FINISHED',
    `content` LONGTEXT NOT NULL COMMENT 'knowledge embedding sync result',
    `result` TEXT NULL COMMENT 'knowledge content',
    `vector_ids` LONGTEXT NULL COMMENT 'vector_ids',
    `summary` LONGTEXT NULL COMMENT 'knowledge summary',
    `gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
    `gmt_modified` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
    PRIMARY KEY (`id`),
    KEY `idx_doc_name` (`doc_name`) COMMENT 'index:idx_doc_name'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='knowledge document table';

CREATE TABLE IF NOT EXISTS `document_chunk`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
    `doc_name` varchar(100) NOT NULL COMMENT 'document path name',
    `doc_type` varchar(50) NOT NULL COMMENT 'doc type',
    `document_id` int NOT NULL COMMENT 'document parent id',
    `content` longtext NOT NULL COMMENT 'chunk content',
    `meta_info` varchar(200) NOT NULL COMMENT 'metadata info',
    `gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
    `gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
    PRIMARY KEY (`id`),
    KEY `idx_document_id` (`document_id`) COMMENT 'index:document_id'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='knowledge document chunk detail';


CREATE TABLE IF NOT EXISTS `connect_config`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `db_type` varchar(255) NOT NULL COMMENT 'db type',
    `db_name` varchar(255) NOT NULL COMMENT 'db name',
    `db_path` varchar(255) DEFAULT NULL COMMENT 'file db path',
    `db_host` varchar(255) DEFAULT NULL COMMENT 'db connect host(not file db)',
    `db_port` varchar(255) DEFAULT NULL COMMENT 'db connect port(not file db)',
    `db_user` varchar(255) DEFAULT NULL COMMENT 'db user',
    `db_pwd` varchar(255) DEFAULT NULL COMMENT 'db password',
    `comment` text COMMENT 'db comment',
    `sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_db` (`db_name`),
    KEY `idx_q_db_type` (`db_type`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT 'Connection config';

CREATE TABLE IF NOT EXISTS `chat_history`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `conv_uid` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record unique id',
    `chat_mode` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation scene mode',
    `summary` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record summary',
    `user_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'interlocutor',
    `messages` text COLLATE utf8mb4_unicode_ci COMMENT 'Conversation details',
    `message_ids` text COLLATE utf8mb4_unicode_ci COMMENT 'Message id list, split by comma',
    `sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
    `gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
    `gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
    UNIQUE KEY `conv_uid` (`conv_uid`),
    PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT 'Chat history';

CREATE TABLE IF NOT EXISTS `chat_history_message`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `conv_uid` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record unique id',
    `index` int NOT NULL COMMENT 'Message index',
    `round_index` int NOT NULL COMMENT 'Round of conversation',
    `message_detail` text COLLATE utf8mb4_unicode_ci COMMENT 'Message details, json format',
    `gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
    `gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
    UNIQUE KEY `message_uid_index` (`conv_uid`, `index`),
    PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT 'Chat history message';

CREATE TABLE IF NOT EXISTS `chat_feed_back`
(
    `id` bigint(20) NOT NULL AUTO_INCREMENT,
    `conv_uid` varchar(128) DEFAULT NULL COMMENT 'Conversation ID',
    `conv_index` int(4) DEFAULT NULL COMMENT 'Round of conversation',
    `score` int(1) DEFAULT NULL COMMENT 'Score of user',
    `ques_type` varchar(32) DEFAULT NULL COMMENT 'User question category',
    `question` longtext DEFAULT NULL COMMENT 'User question',
    `knowledge_space` varchar(128) DEFAULT NULL COMMENT 'Knowledge space name',
    `messages` longtext DEFAULT NULL COMMENT 'The details of user feedback',
    `user_name` varchar(128) DEFAULT NULL COMMENT 'User name',
    `gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
    `gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_conv` (`conv_uid`,`conv_index`),
    KEY `idx_conv` (`conv_uid`,`conv_index`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='User feedback table';


CREATE TABLE IF NOT EXISTS `my_plugin`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `tenant` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'user tenant',
    `user_code` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'user code',
    `user_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'user name',
    `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin name',
    `file_name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin package file name',
    `type` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin type',
    `version` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin version',
    `use_count` int DEFAULT NULL COMMENT 'plugin total use count',
    `succ_count` int DEFAULT NULL COMMENT 'plugin total success count',
    `sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
    `gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'plugin install time',
    PRIMARY KEY (`id`),
    UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='User plugin table';

CREATE TABLE IF NOT EXISTS `plugin_hub`
(
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin name',
    `description` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin description',
    `author` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin author',
    `email` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin author email',
    `type` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin type',
    `version` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin version',
    `storage_channel` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin storage channel',
    `storage_url` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin download url',
    `download_param` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin download param',
    `gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'plugin upload time',
    `installed` int DEFAULT NULL COMMENT 'plugin already installed count',
    PRIMARY KEY (`id`),
    UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='Plugin Hub table';


CREATE TABLE IF NOT EXISTS `prompt_manage`
(
    `id` int(11) NOT NULL AUTO_INCREMENT,
    `chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Chat scene',
    `sub_chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Sub chat scene',
    `prompt_type` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt type: common or private',
    `prompt_name` varchar(256) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'prompt name',
    `content` longtext COLLATE utf8mb4_unicode_ci COMMENT 'Prompt content',
    `input_variables` varchar(1024) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt input variables(split by comma)',
    `model` varchar(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt model name(we can use different models for different prompts)',
    `prompt_language` varchar(32) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt language(eg:en, zh-cn)',
    `prompt_format` varchar(32) COLLATE utf8mb4_unicode_ci DEFAULT 'f-string' COMMENT 'Prompt format(eg: f-string, jinja2)',
    `prompt_desc` varchar(512) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt description',
    `user_name` varchar(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'User name',
    `sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
    `gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
    `gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
    PRIMARY KEY (`id`),
    UNIQUE KEY `prompt_name_uiq` (`prompt_name`, `sys_code`, `prompt_language`, `model`),
    KEY `gmt_created_idx` (`gmt_created`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='Prompt management table';

CREATE TABLE IF NOT EXISTS `gpts_conversations` (
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `conv_id` varchar(255) NOT NULL COMMENT 'The unique id of the conversation record',
    `user_goal` text NOT NULL COMMENT 'User''s goals content',
    `gpts_name` varchar(255) NOT NULL COMMENT 'The gpts name',
    `state` varchar(255) DEFAULT NULL COMMENT 'The gpts state',
    `max_auto_reply_round` int(11) NOT NULL COMMENT 'max auto reply round',
    `auto_reply_count` int(11) NOT NULL COMMENT 'auto reply count',
    `user_code` varchar(255) DEFAULT NULL COMMENT 'user code',
    `sys_code` varchar(255) DEFAULT NULL COMMENT 'system app code',
    `created_at` datetime DEFAULT NULL COMMENT 'create time',
    `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
    `team_mode` varchar(255) NULL COMMENT 'agent team work mode',

    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_gpts_conversations` (`conv_id`),
    KEY `idx_gpts_name` (`gpts_name`)
) ENGINE=InnoDB AUTO_INCREMENT=26 DEFAULT CHARSET=utf8mb4 COMMENT="gpt conversations";

CREATE TABLE IF NOT EXISTS `gpts_instance` (
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `gpts_name` varchar(255) NOT NULL COMMENT 'Current AI assistant name',
    `gpts_describe` varchar(2255) NOT NULL COMMENT 'Current AI assistant describe',
    `resource_db` text COMMENT 'List of structured database names contained in the current gpts',
    `resource_internet` text COMMENT 'Is it possible to retrieve information from the internet',
    `resource_knowledge` text COMMENT 'List of unstructured database names contained in the current gpts',
    `gpts_agents` varchar(1000) DEFAULT NULL COMMENT 'List of agents names contained in the current gpts',
    `gpts_models` varchar(1000) DEFAULT NULL COMMENT 'List of llm model names contained in the current gpts',
    `language` varchar(100) DEFAULT NULL COMMENT 'gpts language',
    `user_code` varchar(255) NOT NULL COMMENT 'user code',
    `sys_code` varchar(255) DEFAULT NULL COMMENT 'system app code',
    `created_at` datetime DEFAULT NULL COMMENT 'create time',
    `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
    `team_mode` varchar(255) NOT NULL COMMENT 'Team work mode',
    `is_sustainable` tinyint(1) NOT NULL COMMENT 'Applications for sustainable dialogue',
    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_gpts` (`gpts_name`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8mb4 COMMENT="gpts instance";

CREATE TABLE `gpts_messages` (
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `conv_id` varchar(255) NOT NULL COMMENT 'The unique id of the conversation record',
    `sender` varchar(255) NOT NULL COMMENT 'Who is speaking in the current conversation turn',
    `receiver` varchar(255) NOT NULL COMMENT 'Who receives the message in the current conversation turn',
    `model_name` varchar(255) DEFAULT NULL COMMENT 'message generate model',
    `rounds` int(11) NOT NULL COMMENT 'dialogue turns',
    `content` text COMMENT 'Content of the speech',
    `current_goal` text COMMENT 'The target corresponding to the current message',
    `context` text COMMENT 'Current conversation context',
    `review_info` text COMMENT 'Current conversation review info',
    `action_report` text COMMENT 'Current conversation action report',
    `role` varchar(255) DEFAULT NULL COMMENT 'The role of the current message content',
    `created_at` datetime DEFAULT NULL COMMENT 'create time',
    `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
    PRIMARY KEY (`id`),
    KEY `idx_q_messages` (`conv_id`,`rounds`,`sender`)
) ENGINE=InnoDB AUTO_INCREMENT=100 DEFAULT CHARSET=utf8mb4 COMMENT="gpts message";


CREATE TABLE `gpts_plans` (
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `conv_id` varchar(255) NOT NULL COMMENT 'The unique id of the conversation record',
    `sub_task_num` int(11) NOT NULL COMMENT 'Subtask number',
    `sub_task_title` varchar(255) NOT NULL COMMENT 'subtask title',
    `sub_task_content` text NOT NULL COMMENT 'subtask content',
    `sub_task_agent` varchar(255) DEFAULT NULL COMMENT 'Available agents corresponding to subtasks',
    `resource_name` varchar(255) DEFAULT NULL COMMENT 'resource name',
    `rely` varchar(255) DEFAULT NULL COMMENT 'Subtask dependencies, like: 1,2,3',
    `agent_model` varchar(255) DEFAULT NULL COMMENT 'LLM model used by subtask processing agents',
    `retry_times` int(11) DEFAULT NULL COMMENT 'number of retries',
    `max_retry_times` int(11) DEFAULT NULL COMMENT 'Maximum number of retries',
    `state` varchar(255) DEFAULT NULL COMMENT 'subtask status',
    `result` longtext COMMENT 'subtask result',
    `created_at` datetime DEFAULT NULL COMMENT 'create time',
    `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_sub_task` (`conv_id`,`sub_task_num`)
) ENGINE=InnoDB AUTO_INCREMENT=19 DEFAULT CHARSET=utf8mb4 COMMENT="gpt plan";

-- dbgpt.dbgpt_serve_flow definition
CREATE TABLE `dbgpt_serve_flow` (
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'Auto increment id',
    `uid` varchar(128) NOT NULL COMMENT 'Unique id',
    `dag_id` varchar(128) DEFAULT NULL COMMENT 'DAG id',
    `name` varchar(128) DEFAULT NULL COMMENT 'Flow name',
    `flow_data` text COMMENT 'Flow data, JSON format',
    `user_name` varchar(128) DEFAULT NULL COMMENT 'User name',
    `sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
    `gmt_created` datetime DEFAULT NULL COMMENT 'Record creation time',
    `gmt_modified` datetime DEFAULT NULL COMMENT 'Record update time',
    `flow_category` varchar(64) DEFAULT NULL COMMENT 'Flow category',
    `description` varchar(512) DEFAULT NULL COMMENT 'Flow description',
    `state` varchar(32) DEFAULT NULL COMMENT 'Flow state',
    `source` varchar(64) DEFAULT NULL COMMENT 'Flow source',
    `source_url` varchar(512) DEFAULT NULL COMMENT 'Flow source url',
    `version` varchar(32) DEFAULT NULL COMMENT 'Flow version',
    `label` varchar(128) DEFAULT NULL COMMENT 'Flow label',
    `editable` int DEFAULT NULL COMMENT 'Editable, 0: editable, 1: not editable',
    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_uid` (`uid`),
    KEY `ix_dbgpt_serve_flow_sys_code` (`sys_code`),
    KEY `ix_dbgpt_serve_flow_uid` (`uid`),
    KEY `ix_dbgpt_serve_flow_dag_id` (`dag_id`),
    KEY `ix_dbgpt_serve_flow_user_name` (`user_name`),
    KEY `ix_dbgpt_serve_flow_name` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=15 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- dbgpt.gpts_app definition
CREATE TABLE `gpts_app` (
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `app_code` varchar(255) NOT NULL COMMENT 'Current AI assistant code',
    `app_name` varchar(255) NOT NULL COMMENT 'Current AI assistant name',
    `app_describe` varchar(2255) NOT NULL COMMENT 'Current AI assistant describe',
    `language` varchar(100) NOT NULL COMMENT 'gpts language',
    `team_mode` varchar(255) NOT NULL COMMENT 'Team work mode',
    `team_context` text COMMENT 'The execution logic and team member content that teams with different working modes rely on',
    `user_code` varchar(255) DEFAULT NULL COMMENT 'user code',
    `sys_code` varchar(255) DEFAULT NULL COMMENT 'system app code',
    `created_at` datetime DEFAULT NULL COMMENT 'create time',
    `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
    `icon` varchar(1024) DEFAULT NULL COMMENT 'app icon, url',
    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_gpts_app` (`app_name`)
) ENGINE=InnoDB AUTO_INCREMENT=39 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

CREATE TABLE `gpts_app_collection` (
    `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `app_code` varchar(255) NOT NULL COMMENT 'Current AI assistant code',
    `user_code` int(11) NOT NULL COMMENT 'user code',
    `sys_code` varchar(255) NOT NULL COMMENT 'system app code',
    `created_at` datetime DEFAULT NULL COMMENT 'create time',
    `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
    PRIMARY KEY (`id`),
    KEY `idx_app_code` (`app_code`),
    KEY `idx_user_code` (`user_code`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpt collections";

-- dbgpt.gpts_app_detail definition
CREATE TABLE `gpts_app_detail` (
    `id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
    `app_code` varchar(255) NOT NULL COMMENT 'Current AI assistant code',
    `app_name` varchar(255) NOT NULL COMMENT 'Current AI assistant name',
    `agent_name` varchar(255) NOT NULL COMMENT 'Agent name',
    `node_id` varchar(255) NOT NULL COMMENT 'Current AI assistant Agent Node id',
    `resources` text COMMENT 'Agent bind resource',
    `prompt_template` text COMMENT 'Agent bind template',
    `llm_strategy` varchar(25) DEFAULT NULL COMMENT 'Agent use llm strategy',
    `llm_strategy_value` text COMMENT 'Agent use llm strategy value',
    `created_at` datetime DEFAULT NULL COMMENT 'create time',
    `updated_at` datetime DEFAULT NULL COMMENT 'last update time',
    PRIMARY KEY (`id`),
    UNIQUE KEY `uk_gpts_app_agent_node` (`app_name`,`agent_name`,`node_id`)
) ENGINE=InnoDB AUTO_INCREMENT=23 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

CREATE
DATABASE IF NOT EXISTS EXAMPLE_1;
use EXAMPLE_1;
CREATE TABLE IF NOT EXISTS `users`
(
    `id` int NOT NULL AUTO_INCREMENT,
    `username` varchar(50) NOT NULL COMMENT 'username',
    `password` varchar(50) NOT NULL COMMENT 'password',
    `email` varchar(50) NOT NULL COMMENT 'email',
    `phone` varchar(20) DEFAULT NULL COMMENT 'phone',
    PRIMARY KEY (`id`),
    KEY `idx_username` (`username`) COMMENT 'index: query by username'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='chat user table';

INSERT INTO users (username, password, email, phone)
VALUES ('user_1', 'password_1', 'user_1@example.com', '12345678901');
INSERT INTO users (username, password, email, phone)
VALUES ('user_2', 'password_2', 'user_2@example.com', '12345678902');
INSERT INTO users (username, password, email, phone)
VALUES ('user_3', 'password_3', 'user_3@example.com', '12345678903');
INSERT INTO users (username, password, email, phone)
VALUES ('user_4', 'password_4', 'user_4@example.com', '12345678904');
INSERT INTO users (username, password, email, phone)
VALUES ('user_5', 'password_5', 'user_5@example.com', '12345678905');
INSERT INTO users (username, password, email, phone)
VALUES ('user_6', 'password_6', 'user_6@example.com', '12345678906');
INSERT INTO users (username, password, email, phone)
VALUES ('user_7', 'password_7', 'user_7@example.com', '12345678907');
INSERT INTO users (username, password, email, phone)
VALUES ('user_8', 'password_8', 'user_8@example.com', '12345678908');
INSERT INTO users (username, password, email, phone)
VALUES ('user_9', 'password_9', 'user_9@example.com', '12345678909');
INSERT INTO users (username, password, email, phone)
VALUES ('user_10', 'password_10', 'user_10@example.com', '12345678900');
INSERT INTO users (username, password, email, phone)
VALUES ('user_11', 'password_11', 'user_11@example.com', '12345678901');
INSERT INTO users (username, password, email, phone)
VALUES ('user_12', 'password_12', 'user_12@example.com', '12345678902');
INSERT INTO users (username, password, email, phone)
VALUES ('user_13', 'password_13', 'user_13@example.com', '12345678903');
INSERT INTO users (username, password, email, phone)
VALUES ('user_14', 'password_14', 'user_14@example.com', '12345678904');
INSERT INTO users (username, password, email, phone)
VALUES ('user_15', 'password_15', 'user_15@example.com', '12345678905');
INSERT INTO users (username, password, email, phone)
VALUES ('user_16', 'password_16', 'user_16@example.com', '12345678906');
INSERT INTO users (username, password, email, phone)
VALUES ('user_17', 'password_17', 'user_17@example.com', '12345678907');
INSERT INTO users (username, password, email, phone)
VALUES ('user_18', 'password_18', 'user_18@example.com', '12345678908');
INSERT INTO users (username, password, email, phone)
VALUES ('user_19', 'password_19', 'user_19@example.com', '12345678909');
INSERT INTO users (username, password, email, phone)
VALUES ('user_20', 'password_20', 'user_20@example.com', '12345678900');
@ -116,9 +116,7 @@ def _migration_db_storage(param: "WebServerParameters"):
|
||||
f"Create all tables stored in this metadata error: {str(e)}"
|
||||
)
|
||||
|
||||
_ddl_init_and_upgrade(
|
||||
default_meta_data_path, param.disable_alembic_upgrade
|
||||
)
|
||||
_ddl_init_and_upgrade(default_meta_data_path, param.disable_alembic_upgrade)
|
||||
else:
|
||||
warn_msg = """For safety considerations, MySQL Database not support DDL init and upgrade. "
|
||||
"1.If you are use DB-GPT firstly, please manually execute the following command to initialize,
|
||||
|
@@ -11,6 +11,7 @@ from fastapi.openapi.docs import get_swagger_ui_html
 from fastapi.staticfiles import StaticFiles

 from dbgpt._private.config import Config
+from dbgpt._version import version
 from dbgpt.app.base import (
     WebServerParameters,
     _create_model_start_listener,
@@ -50,7 +51,7 @@ CFG = Config()
 app = FastAPI(
     title="DBGPT OPEN API",
     description="This is dbgpt, with auto docs for the API and everything",
-    version="0.5.0",
+    version=version,
     openapi_tags=[],
 )
 # Use custom router to support priority
@@ -78,17 +79,6 @@ async def custom_swagger_ui_html():

 system_app = SystemApp(app)

-origins = ["*"]
-
-# Add CORS middleware
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=origins,
-    allow_credentials=True,
-    allow_methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
-    allow_headers=["*"],
-)
-

 def mount_routers(app: FastAPI):
     """Lazy import to avoid high time cost"""
@@ -216,8 +206,17 @@ def run_uvicorn(param: WebServerParameters):
     import uvicorn

     setup_http_service_logging()

+    # https://github.com/encode/starlette/issues/617
+    cors_app = CORSMiddleware(
+        app=app,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
+        allow_headers=["*"],
+    )
     uvicorn.run(
-        app,
+        cors_app,
         host=param.host,
         port=param.port,
         log_level=logging_str_to_uvicorn_level(param.log_level),
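Note: this change works around https://github.com/encode/starlette/issues/617. Instead of relying on app.add_middleware at module level (removed above), the ASGI app is wrapped in CORSMiddleware explicitly and the wrapper is handed to uvicorn, so the CORS layer is always the outermost application. A minimal self-contained sketch of the same pattern; the host and port are illustrative, not DB-GPT's defaults:

import uvicorn
from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware

app = FastAPI()

# Wrap the ASGI app directly rather than calling app.add_middleware(...),
# so the CORS layer wraps everything the server ultimately runs.
cors_app = CORSMiddleware(
    app=app,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
    allow_headers=["*"],
)

if __name__ == "__main__":
    uvicorn.run(cors_app, host="127.0.0.1", port=8000)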
File diff suppressed because one or more lines are too long (repeated for multiple static asset files)

dbgpt/app/static/_next/static/css/f38fea81b610a6c9.css (new file, 3 lines)
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
self.__BUILD_MANIFEST=function(s,c,a,e,t,n,d,f,k,b,h,i,u,j,p,o,g,l,r){return{__rewrites:{beforeFiles:[],afterFiles:[],fallback:[]},"/":[p,s,c,e,a,b,d,f,o,"static/chunks/9305-f44429d5185a9fc7.js","static/chunks/7299-cb3b5c1ad528f20a.js","static/chunks/pages/index-018bceca9c8a4ee9.js"],"/_error":["static/chunks/pages/_error-8095ba9e1bf12f30.js"],"/agent":[s,c,a,t,b,n,"static/chunks/pages/agent-ce4aada0ffb26742.js"],"/app":[h,s,c,e,a,t,n,i,u,"static/chunks/7958-ed34baf152e6e252.js",j,"static/chunks/pages/app-3b436b62369b5dee.js"],"/chat":["static/chunks/pages/chat-b09234393c5f8ad7.js"],"/database":[s,c,e,a,t,n,f,k,"static/chunks/7902-94d75aab69ac7c8d.js","static/chunks/pages/database-5b649049b3adcaf7.js"],"/flow":[h,i,u,j,"static/chunks/pages/flow-c83aa1081ec293f9.js"],"/flow/canvas":[p,h,s,c,e,a,d,f,i,k,u,g,o,"static/chunks/4350-1896c46dd5e9afe8.js",j,"static/chunks/pages/flow/canvas-d313d1fe05a1d9e1.js"],"/knowledge":[l,s,c,a,t,b,n,f,k,r,g,"static/chunks/8660-25eebcb95c34109b.js","static/chunks/pages/knowledge-7ab372de241857ba.js"],"/knowledge/chunk":[s,e,t,d,n,"static/chunks/pages/knowledge/chunk-148ca5920e6a3447.js"],"/models":[l,s,c,e,a,k,"static/chunks/3444-30181eacc7980e66.js","static/chunks/pages/models-a019e728f75142a1.js"],"/prompt":[s,c,e,a,d,r,"static/chunks/4733-cc041bf7a3d12e39.js","static/chunks/5396-3e98ef6b437678bd.js","static/chunks/pages/prompt-8ac6786093609ab9.js"],sortedPages:["/","/_app","/_error","/agent","/app","/chat","/database","/flow","/flow/canvas","/knowledge","/knowledge/chunk","/models","/prompt"]}}("static/chunks/7113-c0c4ee5dc30929ba.js","static/chunks/5503-c65f6d730754acc7.js","static/chunks/9479-21f588e1fd4e6b6d.js","static/chunks/1009-f20562de52b03b76.js","static/chunks/4442-2fd5fdaab894a502.js","static/chunks/5813-c6244a8eba7ef4ae.js","static/chunks/4810-1e930464030aee69.js","static/chunks/411-b5d3e7f64bee2335.js","static/chunks/8928-0e78def492052d13.js","static/chunks/4553-5a62c446efb06d63.js","static/chunks/971df74e-7436ff4085ebb785.js","static/chunks/7434-29506257e67e8077.js","static/chunks/9924-5bce555f07385e1f.js","static/css/b4846eed11c4725f.css","static/chunks/29107295-75edf0bf34e24b1e.js","static/chunks/2487-24749b0b156943d8.js","static/chunks/6485-a0f49ba464882399.js","static/chunks/75fc9c18-1d6133135d3d283c.js","static/chunks/8548-e633dfc38edeb044.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
@@ -0,0 +1 @@
self.__BUILD_MANIFEST=function(s,c,a,e,t,n,d,f,k,h,i,u,b,j,p,o,g,l,r){return{__rewrites:{beforeFiles:[],afterFiles:[],fallback:[]},"/":[p,s,c,a,e,h,d,f,o,"static/chunks/9305-f44429d5185a9fc7.js","static/chunks/7299-cb3b5c1ad528f20a.js","static/chunks/pages/index-7210f638987b7faa.js"],"/_error":["static/chunks/pages/_error-8095ba9e1bf12f30.js"],"/agent":[s,c,a,t,h,n,"static/chunks/pages/agent-7a77bcca568fd9db.js"],"/app":[i,s,c,a,e,t,n,u,b,"static/chunks/7958-ed34baf152e6e252.js",j,"static/chunks/pages/app-965a0a3b7d3520ce.js"],"/chat":["static/chunks/pages/chat-3edbd0be9e7a02e9.js"],"/database":[s,c,a,e,t,n,f,k,"static/chunks/7559-0f4c8f77fad3cd28.js","static/chunks/pages/database-5c41585a3fd75216.js"],"/flow":[i,s,c,a,u,b,j,"static/chunks/pages/flow-af8106c2d4339aca.js"],"/flow/canvas":[p,i,s,c,a,e,d,f,u,k,b,g,o,"static/chunks/4350-1896c46dd5e9afe8.js",j,"static/chunks/pages/flow/canvas-57121d2cb2bfbe1c.js"],"/knowledge":[l,s,c,a,t,h,n,f,k,r,g,"static/chunks/8660-13b381e809ea1aa1.js","static/chunks/pages/knowledge-ca1dce3ad63f6439.js"],"/knowledge/chunk":[s,e,t,d,n,"static/chunks/pages/knowledge/chunk-bc057a5c02a2c123.js"],"/models":[l,s,c,a,e,k,"static/chunks/3444-30181eacc7980e66.js","static/chunks/pages/models-4a50fb0ade28ee60.js"],"/prompt":[s,c,a,e,d,r,"static/chunks/4733-cc041bf7a3d12e39.js","static/chunks/5396-3e98ef6b437678bd.js","static/chunks/pages/prompt-a25dc012271e1f81.js"],sortedPages:["/","/_app","/_error","/agent","/app","/chat","/database","/flow","/flow/canvas","/knowledge","/knowledge/chunk","/models","/prompt"]}}("static/chunks/2185-30f9d0578fa0d631.js","static/chunks/5503-c65f6d730754acc7.js","static/chunks/9479-21f588e1fd4e6b6d.js","static/chunks/1009-4b2af86bde623424.js","static/chunks/4442-2fd5fdaab894a502.js","static/chunks/5813-c6244a8eba7ef4ae.js","static/chunks/4810-1e930464030aee69.js","static/chunks/411-b5d3e7f64bee2335.js","static/chunks/8928-0e78def492052d13.js","static/chunks/4553-5a62c446efb06d63.js","static/chunks/971df74e-7436ff4085ebb785.js","static/chunks/7434-29506257e67e8077.js","static/chunks/9924-5bce555f07385e1f.js","static/css/b4846eed11c4725f.css","static/chunks/29107295-75edf0bf34e24b1e.js","static/chunks/2487-24749b0b156943d8.js","static/chunks/6485-a0f49ba464882399.js","static/chunks/75fc9c18-1d6133135d3d283c.js","static/chunks/8548-e633dfc38edeb044.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
File diff suppressed because one or more lines are too long (repeated for multiple static asset files)
@@ -649,8 +649,8 @@ class BaseMetadata(BaseResource):
             # TODO, skip the optional parameters.
             raise FlowParameterMetadataException(
                 f"Parameters count not match(current key: {self.id}). "
-                f"Expected {len(self.parameters)}, "
-                f"but got {len(view_parameters)} from JSON metadata."
+                f"Expected {len(current_required_parameters)}, "
+                f"but got {len(view_required_parameters)} from JSON metadata."
+                f"Required parameters: {current_required_parameters.keys()}, "
+                f"but got {view_required_parameters.keys()}."
             )
@@ -26,6 +26,9 @@ from .exceptions import (
 logger = logging.getLogger(__name__)

+
+AWEL_FLOW_VERSION = "0.1.1"
+

 class FlowPositionData(BaseModel):
     """Position of a node in a flow."""

@@ -152,12 +155,10 @@ class State(str, Enum):
     INITIALIZING = "initializing"
     DEVELOPING = "developing"
     TESTING = "testing"
-    READY_TO_DEPLOY = "ready_to_deploy"
     DEPLOYED = "deployed"
     RUNNING = "running"
-    PAUSED = "paused"
     DISABLED = "disabled"
-    ENABLED = "enabled"
+    LOAD_FAILED = "load_failed"

     @classmethod
     def value_of(cls, value: Optional[str]) -> "State":
@@ -169,6 +170,60 @@ class State(str, Enum):
             return state
         raise ValueError(f"Invalid state value: {value}")

+    @classmethod
+    def can_change_state(cls, current_state: "State", new_state: "State") -> bool:
+        """Check whether the flow panel state can change from current_state to new_state."""
+        allowed_transitions: Dict[State, List[State]] = {
+            State.INITIALIZING: [
+                State.DEVELOPING,
+                State.INITIALIZING,
+                State.LOAD_FAILED,
+            ],
+            State.DEVELOPING: [
+                State.TESTING,
+                State.DEPLOYED,
+                State.DISABLED,
+                State.DEVELOPING,
+                State.LOAD_FAILED,
+            ],
+            State.TESTING: [
+                State.TESTING,
+                State.DEPLOYED,
+                State.DEVELOPING,
+                State.DISABLED,
+                State.RUNNING,
+                State.LOAD_FAILED,
+            ],
+            State.DEPLOYED: [
+                State.DEPLOYED,
+                State.DEVELOPING,
+                State.TESTING,
+                State.DISABLED,
+                State.RUNNING,
+                State.LOAD_FAILED,
+            ],
+            State.RUNNING: [
+                State.RUNNING,
+                State.DEPLOYED,
+                State.TESTING,
+                State.DISABLED,
+            ],
+            State.DISABLED: [State.DISABLED, State.DEPLOYED],
+            State.LOAD_FAILED: [
+                State.LOAD_FAILED,
+                State.DEVELOPING,
+                State.DEPLOYED,
+                State.DISABLED,
+            ],
+        }
+        if new_state in allowed_transitions[current_state]:
+            return True
+        else:
+            logger.error(
+                f"Invalid state transition from {current_state} to {new_state}"
+            )
+            return False
+

 class FlowCategory(str, Enum):
     """Flow category."""
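Note: a quick usage sketch of the transition guard above (State members as defined in this diff; the check does not mutate anything, callers apply the state change themselves):

assert State.can_change_state(State.DEVELOPING, State.TESTING)
assert State.can_change_state(State.TESTING, State.RUNNING)
# Disallowed: DISABLED may only move to DISABLED or DEPLOYED, so this
# logs "Invalid state transition ..." and returns False.
assert not State.can_change_state(State.DISABLED, State.RUNNING)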
@@ -219,6 +274,11 @@ class FlowPanel(BaseModel):
     state: State = Field(
         default=State.INITIALIZING, description="Current state of the flow panel"
     )
+    error_message: Optional[str] = Field(
+        None,
+        description="Error message when loading the flow panel failed",
+        examples=["Unable to load the flow panel."],
+    )
     source: Optional[str] = Field(
         "DBGPT-WEB",
         description="Source of the flow panel",
@@ -229,7 +289,9 @@ class FlowPanel(BaseModel):
         description="Source url of the flow panel",
     )
     version: Optional[str] = Field(
-        "0.1.0", description="Version of the flow panel", examples=["0.1.0", "0.2.0"]
+        AWEL_FLOW_VERSION,
+        description="Version of the flow panel",
+        examples=["0.1.0", "0.2.0"],
     )
     editable: bool = Field(
         True,
@@ -251,26 +313,6 @@ class FlowPanel(BaseModel):
         examples=["2021-08-01 12:00:00", "2021-08-01 12:00:01", "2021-08-01 12:00:02"],
     )

-    def change_state(self, new_state: State) -> bool:
-        """Change the state of the flow panel."""
-        allowed_transitions: Dict[State, List[State]] = {
-            State.INITIALIZING: [State.DEVELOPING],
-            State.DEVELOPING: [State.TESTING, State.DISABLED],
-            State.TESTING: [State.READY_TO_DEPLOY, State.DEVELOPING, State.DISABLED],
-            State.READY_TO_DEPLOY: [State.DEPLOYED, State.DEVELOPING],
-            State.DEPLOYED: [State.RUNNING, State.DISABLED],
-            State.RUNNING: [State.PAUSED, State.DISABLED, State.DEPLOYED],
-            State.PAUSED: [State.RUNNING, State.DISABLED],
-            State.DISABLED: [State.ENABLED],
-            State.ENABLED: [s for s in State if s != State.INITIALIZING],
-        }
-        if new_state in allowed_transitions[self.state]:
-            self.state = new_state
-            return True
-        else:
-            logger.error(f"Invalid state transition from {self.state} to {new_state}")
-            return False
-
     @root_validator(pre=True)
     def pre_fill(cls, values: Dict[str, Any]) -> Dict[str, Any]:
         """Pre fill the metadata."""
@@ -4,10 +4,12 @@ After DB-GPT started, the trigger manager will be initialized and register all t
 """
 import logging
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Dict, Optional
+from collections import defaultdict
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Union

 from dbgpt.component import BaseComponent, ComponentType, SystemApp

+from ..util.http_util import join_paths
 from .base import Trigger

 if TYPE_CHECKING:
@@ -64,6 +66,7 @@ class HttpTriggerManager(TriggerManager):
         self._router_prefix = router_prefix
         self._router = router
         self._trigger_map: Dict[str, Trigger] = {}
+        self._router_tables: Dict[str, Set[str]] = defaultdict(set)

     def register_trigger(self, trigger: Any, system_app: SystemApp) -> None:
         """Register a trigger to current manager."""
@@ -73,6 +76,11 @@ class HttpTriggerManager(TriggerManager):
             raise ValueError(f"Current trigger {trigger} not an object of HttpTrigger")
         trigger_id = trigger.node_id
         if trigger_id not in self._trigger_map:
+            path = join_paths(self._router_prefix, trigger._endpoint)
+            methods = trigger._methods
+            # Check whether the route is already registered
+            self._register_route_tables(path, methods)
+            try:
                 if trigger.register_to_app():
                     app = system_app.app
                     if not app:
@@ -82,6 +90,9 @@ class HttpTriggerManager(TriggerManager):
                 else:
                     trigger.mount_to_router(self._router)
                 self._trigger_map[trigger_id] = trigger
+            except Exception as e:
+                self._unregister_route_tables(path, methods)
+                raise e

     def unregister_trigger(self, trigger: Any, system_app: SystemApp) -> None:
         """Unregister a trigger to current manager."""
@@ -96,6 +107,9 @@ class HttpTriggerManager(TriggerManager):
             if not app:
                 raise ValueError("System app not initialized")
             trigger.remove_from_app(app, self._router_prefix)
+            self._unregister_route_tables(
+                join_paths(self._router_prefix, trigger._endpoint), trigger._methods
+            )
         del self._trigger_map[trigger_id]

     def _init_app(self, system_app: SystemApp):
@@ -120,6 +134,34 @@ class HttpTriggerManager(TriggerManager):
         """
         return len(self._trigger_map) > 0

+    def _register_route_tables(
+        self, path: str, methods: Optional[Union[str, List[str]]]
+    ):
+        methods = self._parse_methods(methods)
+        tables = self._router_tables[path]
+        for m in methods:
+            if m in tables:
+                raise ValueError(f"Route {path} method {m} already registered")
+            tables.add(m)
+        self._router_tables[path] = tables
+
+    def _unregister_route_tables(
+        self, path: str, methods: Optional[Union[str, List[str]]]
+    ):
+        methods = self._parse_methods(methods)
+        tables = self._router_tables[path]
+        for m in methods:
+            if m in tables:
+                tables.remove(m)
+        self._router_tables[path] = tables
+
+    def _parse_methods(self, methods: Optional[Union[str, List[str]]]) -> List[str]:
+        if not methods:
+            return ["GET"]
+        elif isinstance(methods, str):
+            return [methods]
+        return [m.upper() for m in methods]
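Note: the route-table bookkeeping above prevents two triggers from claiming the same path and HTTP method. The same guard in isolation, as a hedged standalone sketch (the register_route helper and the paths are hypothetical, mirroring _register_route_tables):

from collections import defaultdict
from typing import Dict, List, Set

router_tables: Dict[str, Set[str]] = defaultdict(set)

def register_route(path: str, methods: List[str]) -> None:
    # Reject a duplicate path+method pair, otherwise record it.
    for m in (m.upper() for m in methods):
        if m in router_tables[path]:
            raise ValueError(f"Route {path} method {m} already registered")
        router_tables[path].add(m)

register_route("/api/v1/awel/trigger/example", ["GET", "POST"])  # ok
register_route("/api/v1/awel/trigger/example", ["post"])  # raises ValueError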

 class DefaultTriggerManager(TriggerManager, BaseComponent):
     """Default trigger manager for AWEL.
@@ -1,4 +1,5 @@
 import logging
+import sys
 from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar

 from fastapi import HTTPException, Request
@@ -7,6 +8,12 @@ from fastapi.responses import JSONResponse

 from dbgpt._private.pydantic import BaseModel, Field

+if sys.version_info < (3, 11):
+    try:
+        from exceptiongroup import ExceptionGroup
+    except ImportError:
+        ExceptionGroup = None
+
 if TYPE_CHECKING:
     from fastapi import FastAPI

@@ -71,8 +78,16 @@ async def http_exception_handler(request: Request, exc: HTTPException):

 async def common_exception_handler(request: Request, exc: Exception) -> JSONResponse:
     """Common exception handler"""
+    if ExceptionGroup and isinstance(exc, ExceptionGroup):
+        err_strs = []
+        for e in exc.exceptions:
+            err_strs.append(str(e))
+        err_msg = ";".join(err_strs)
+    else:
+        err_msg = str(exc)
     res = Result.failed(
-        msg=str(exc),
+        msg=err_msg,
         err_code="E0003",
     )
     logger.error(f"common_exception_handler catch Exception: {res}")
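Note: on Python 3.11+ ExceptionGroup is a builtin; on older interpreters the handler relies on the optional exceptiongroup backport imported above, and falls back to str(exc) when it is unavailable. A small illustration of the message flattening (assuming the backport is installed on Python < 3.11):

import sys

if sys.version_info < (3, 11):
    from exceptiongroup import ExceptionGroup  # backport of the 3.11 builtin

eg = ExceptionGroup("request failed", [ValueError("bad value"), KeyError("missing")])
err_msg = ";".join(str(e) for e in eg.exceptions)
print(err_msg)  # bad value;'missing'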
@@ -109,7 +109,7 @@ async def create(
     Returns:
         ServerResponse: The response
     """
-    return Result.succ(service.create(request))
+    return Result.succ(service.create_and_save_dag(request))


 @router.put(
@@ -129,7 +129,7 @@ async def update(
     Returns:
         ServerResponse: The response
     """
-    return Result.succ(service.update(request))
+    return Result.succ(service.update_flow(request))


 @router.delete("/flows/{uid}")
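Note: with the handlers above delegating to create_and_save_dag and update_flow, a flow posted in the DEPLOYED state is built, persisted, and its DAG registered in one request. A hedged client-side sketch; the base URL, route prefix, and payload fields are illustrative, not the exact serve API contract:

import requests

payload = {
    "name": "example_flow",           # illustrative flow name
    "state": "deployed",              # ask the service to register the DAG immediately
    "flow_data": {"nodes": [], "edges": []},
}
resp = requests.post("http://127.0.0.1:5000/api/v1/serve/awel/flows", json=payload)
resp.raise_for_status()
print(resp.json())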
@@ -28,6 +28,7 @@ class ServeEntity(Model):
     flow_data = Column(Text, nullable=True, comment="Flow data, JSON format")
     description = Column(String(512), nullable=True, comment="Flow description")
     state = Column(String(32), nullable=True, comment="Flow state")
+    error_message = Column(String(512), nullable=True, comment="Error message")
     source = Column(String(64), nullable=True, comment="Flow source")
     source_url = Column(String(512), nullable=True, comment="Flow source url")
     version = Column(String(32), nullable=True, comment="Flow version")
@@ -84,6 +85,9 @@ class ServeDao(BaseDao[ServeEntity, ServeRequest, ServerResponse]):
         request_dict = request.dict() if isinstance(request, ServeRequest) else request
         flow_data = json.dumps(request_dict.get("flow_data"), ensure_ascii=False)
         state = request_dict.get("state", State.INITIALIZING.value)
+        error_message = request_dict.get("error_message")
+        if error_message:
+            error_message = error_message[:500]
         new_dict = {
             "uid": request_dict.get("uid"),
             "dag_id": request_dict.get("dag_id"),
@@ -92,6 +96,7 @@ class ServeDao(BaseDao[ServeEntity, ServeRequest, ServerResponse]):
             "flow_category": request_dict.get("flow_category"),
             "flow_data": flow_data,
             "state": state,
+            "error_message": error_message,
             "source": request_dict.get("source"),
             "source_url": request_dict.get("source_url"),
             "version": request_dict.get("version"),
@@ -121,6 +126,7 @@ class ServeDao(BaseDao[ServeEntity, ServeRequest, ServerResponse]):
             flow_category=entity.flow_category,
             flow_data=flow_data,
             state=State.value_of(entity.state),
+            error_message=entity.error_message,
             source=entity.source,
             source_url=entity.source_url,
             version=entity.version,
@@ -151,6 +157,7 @@ class ServeDao(BaseDao[ServeEntity, ServeRequest, ServerResponse]):
             flow_data=flow_data,
             description=entity.description,
             state=State.value_of(entity.state),
+            error_message=entity.error_message,
             source=entity.source,
             source_url=entity.source_url,
             version=entity.version,
@@ -183,13 +190,15 @@ class ServeDao(BaseDao[ServeEntity, ServeRequest, ServerResponse]):
             entry.description = update_request.description
         if update_request.state:
             entry.state = update_request.state.value
+        if update_request.error_message is not None:
+            # Keep first 500 characters
+            entry.error_message = update_request.error_message[:500]
         if update_request.source:
             entry.source = update_request.source
         if update_request.source_url:
             entry.source_url = update_request.source_url
         if update_request.version:
             entry.version = update_request.version
         if update_request.editable:
             entry.editable = ServeEntity.parse_editable(update_request.editable)
         if update_request.user_name:
             entry.user_name = update_request.user_name
@@ -13,7 +13,7 @@ from dbgpt.core.awel import (
     CommonLLMHttpResponseBody,
 )
 from dbgpt.core.awel.dag.dag_manager import DAGManager
-from dbgpt.core.awel.flow.flow_factory import FlowCategory, FlowFactory
+from dbgpt.core.awel.flow.flow_factory import FlowCategory, FlowFactory, State
 from dbgpt.core.awel.trigger.http_trigger import CommonLLMHttpTrigger
 from dbgpt.core.interface.llm import ModelOutput
 from dbgpt.serve.core import BaseService
@@ -103,14 +103,55 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]):
         Returns:
             ServerResponse: The response
         """
+
+    def create_and_save_dag(
+        self, request: ServeRequest, save_failed_flow: bool = False
+    ) -> ServerResponse:
+        """Create a new Flow entity and save the DAG
+
+        Args:
+            request (ServeRequest): The request
+            save_failed_flow (bool): Whether to save the failed flow
+
+        Returns:
+            ServerResponse: The response
+        """
+        try:
+            # Build DAG from request
+            dag = self._flow_factory.build(request)
+            request.dag_id = dag.dag_id
+            # Save DAG to storage
+            request.flow_category = self._parse_flow_category(dag)
+        except Exception as e:
+            if save_failed_flow:
+                request.state = State.LOAD_FAILED
+                request.error_message = str(e)
+                return self.dao.create(request)
+            else:
+                raise e
+        res = self.dao.create(request)
+
+        state = request.state
+        try:
+            if state == State.DEPLOYED:
+                # Register the DAG
+                self.dag_manager.register_dag(dag)
+                # Update state to RUNNING
+                request.state = State.RUNNING
+                request.error_message = ""
+                self.dao.update({"uid": request.uid}, request)
+            else:
+                logger.info(f"Flow state is {state}, skip register DAG")
+        except Exception as e:
+            logger.warning(f"Register DAG({dag.dag_id}) error: {str(e)}")
+            if save_failed_flow:
+                request.state = State.LOAD_FAILED
+                request.error_message = f"Register DAG error: {str(e)}"
+                self.dao.update({"uid": request.uid}, request)
+            else:
+                # Rollback
+                self.delete(request.uid)
+            raise e
+        return res

     def _pre_load_dag_from_db(self):
@@ -131,7 +172,15 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]):
         for entity in entities:
             try:
                 dag = self._flow_factory.build(entity)
+                if entity.state in [State.DEPLOYED, State.RUNNING] or (
+                    entity.version == "0.1.0" and entity.state == State.INITIALIZING
+                ):
+                    # Register the DAG
+                    self.dag_manager.register_dag(dag)
+                    # Update state to RUNNING
+                    entity.state = State.RUNNING
+                    entity.error_message = ""
+                    self.dao.update({"uid": entity.uid}, entity)
             except Exception as e:
                 logger.warning(
                     f"Load DAG({entity.name}, {entity.dag_id}) from db error: {str(e)}"
@@ -154,36 +203,48 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]):
         flows = self.dbgpts_loader.get_flows()
         for flow in flows:
             try:
-                # Try to build the dag from the request
-                self._flow_factory.build(flow)
                 # Set state to DEPLOYED
                 flow.state = State.DEPLOYED
                 exist_inst = self.get({"name": flow.name})
                 if not exist_inst:
-                    self.create(flow)
+                    self.create_and_save_dag(flow, save_failed_flow=True)
                 else:
                     # TODO check version, must be greater than the exist one
                     flow.uid = exist_inst.uid
-                    self.update(flow, check_editable=False)
+                    self.update_flow(flow, check_editable=False, save_failed_flow=True)
             except Exception as e:
                 message = traceback.format_exc()
                 logger.warning(
                     f"Load DAG {flow.name} from dbgpts error: {str(e)}, detail: {message}"
                 )

-    def update(
-        self, request: ServeRequest, check_editable: bool = True
+    def update_flow(
+        self,
+        request: ServeRequest,
+        check_editable: bool = True,
+        save_failed_flow: bool = False,
     ) -> ServerResponse:
         """Update a Flow entity

         Args:
             request (ServeRequest): The request
             check_editable (bool): Whether to check the editable
+            save_failed_flow (bool): Whether to save the failed flow

         Returns:
             ServerResponse: The response
         """
+        new_state = request.state
         try:
             # Try to build the dag from the request
             dag = self._flow_factory.build(request)
             request.flow_category = self._parse_flow_category(dag)
+        except Exception as e:
+            if save_failed_flow:
+                request.state = State.LOAD_FAILED
+                request.error_message = str(e)
+                return self.dao.update({"uid": request.uid}, request)
+            else:
+                raise e
         # Build the query request from the request
         query_request = {"uid": request.uid}
         inst = self.get(query_request)
@ -193,19 +254,26 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]):
|
||||
raise HTTPException(
|
||||
status_code=403, detail=f"Flow {request.uid} is not editable"
|
||||
)
|
||||
old_state = inst.state
|
||||
if not State.can_change_state(old_state, new_state):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"Flow {request.uid} state can't change from {old_state} to "
|
||||
f"{new_state}",
|
||||
)
|
||||
old_data: Optional[ServerResponse] = None
|
||||
try:
|
||||
request.flow_category = self._parse_flow_category(dag)
|
||||
update_obj = self.dao.update(query_request, update_request=request)
|
||||
old_data = self.delete(request.uid)
|
||||
old_data.state = old_state
|
||||
if not old_data:
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Flow detail {request.uid} not found"
|
||||
)
|
||||
return self.create(update_obj)
|
||||
return self.create_and_save_dag(update_obj)
|
||||
except Exception as e:
|
||||
if old_data:
|
||||
self.create(old_data)
|
||||
self.create_and_save_dag(old_data)
|
||||
raise e
|
||||
|
||||
def get(self, request: QUERY_SPEC) -> Optional[ServerResponse]:
|
||||
|
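update_flow additionally validates the requested transition with State.can_change_state before swapping the stored record, and restores the old data if re-creating the flow fails. Client-side, a transition the server considers invalid surfaces as an HTTP 400 through the error branch of the interceptor tuple; a hedged sketch using the updateFlowById wrapper seen in the canvas changes below (deployExistingFlow is a hypothetical helper):

    import { apiInterceptors, updateFlowById } from '@/client/api';
    import { IFlowUpdateParam } from '@/types/flow';

    // Illustrative: a state change the server rejects (HTTP 400) lands in `err`.
    async function deployExistingFlow(uid: string, flow: IFlowUpdateParam) {
      const [err, , res] = await apiInterceptors(updateFlowById(uid, { ...flow, uid, state: 'deployed' }));
      return err ? null : res;
    }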
@@ -11,7 +11,7 @@ export type ApiResponse<T = any, D = any> = AxiosResponse<ResponseType<T>, D>;

export type SuccessTuple<T = any, D = any> = [null, T, ResponseType<T>, ApiResponse<T, D>];

-export type FailedTuple = [Error | AxiosError, null, null, null];
+export type FailedTuple<T = any, D = any> = [Error | AxiosError<T, D>, null, null, null];

const ins = axios.create({
  baseURL: process.env.API_BASE_URL ?? '',
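Making FailedTuple generic over <T, D> means the error and success branches of apiInterceptors share one tuple shape, so callers can destructure both from a single await. A small usage sketch (getFlows is from this codebase; listFlows is a hypothetical wrapper):

    import { apiInterceptors, getFlows } from '@/client/api';

    async function listFlows() {
      const [err, data] = await apiInterceptors(getFlows());
      // On failure err is the Error/AxiosError and data is null; on success the
      // tuple also carries the raw ResponseType and AxiosResponse if needed.
      return err ? null : data;
    }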
@@ -1,5 +1,5 @@
import { AxiosError } from 'axios';
-import { ApiResponse, FailedTuple, SuccessTuple } from '../';
+import { ApiResponse, FailedTuple, SuccessTuple, ResponseType } from '../';
import { notification } from 'antd';

/**
@@ -28,10 +28,17 @@ export const apiInterceptors = <T = any, D = any>(promise: Promise<ApiResponse<T
      }
      return [null, data.data, data, response];
    })
-    .catch<FailedTuple>((err: Error | AxiosError) => {
+    .catch<FailedTuple<T, D>>((err: Error | AxiosError<T, D>) => {
+      let errMessage = err.message;
+      if (err instanceof AxiosError) {
+        try {
+          const { err_msg } = JSON.parse(err.request.response) as ResponseType<null>;
+          err_msg && (errMessage = err_msg);
+        } catch (e) {}
+      }
      notification.error({
        message: `Request error`,
-        description: err.message,
+        description: errMessage,
      });
      return [err, null, null, null];
    });
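The updated catch handler prefers the backend's structured err_msg over Axios's generic message whenever the raw response body parses as a ResponseType. For reference, a sketch of the shape it assumes; the field names are inferred from what this diff reads (res?.success, res?.err_msg), and the real type lives in the client package:

    // Assumed shape, based on the fields this diff reads; not the canonical definition.
    type ResponseTypeSketch<T = any> = {
      success: boolean;
      err_code?: string | null;
      err_msg?: string | null;
      data: T | null;
    };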
@@ -1,7 +1,15 @@
import { apiInterceptors, deleteFlowById, newDialogue } from '@/client/api';
import { IFlow } from '@/types/flow';
-import { DeleteFilled, EditFilled, MessageFilled, WarningOutlined } from '@ant-design/icons';
-import { Modal } from 'antd';
+import {
+  CopyFilled,
+  DeleteFilled,
+  EditFilled,
+  ExclamationCircleFilled,
+  ExclamationCircleOutlined,
+  MessageFilled,
+  WarningOutlined,
+} from '@ant-design/icons';
+import { Modal, Tooltip } from 'antd';
import React, { useContext } from 'react';
import { useTranslation } from 'react-i18next';
import FlowPreview from './preview-flow';
@@ -13,9 +21,10 @@ import qs from 'querystring';
interface FlowCardProps {
  flow: IFlow;
  deleteCallback: (uid: string) => void;
+  onCopy: (flow: IFlow) => void;
}

-const FlowCard: React.FC<FlowCardProps> = ({ flow, deleteCallback }) => {
+const FlowCard: React.FC<FlowCardProps> = ({ flow, onCopy, deleteCallback }) => {
  const { model } = useContext(ChatContext);
  const { t } = useTranslation();
  const [modal, contextHolder] = Modal.useModal();
@@ -63,12 +72,28 @@ const FlowCard: React.FC<FlowCardProps> = ({ flow, deleteCallback }) => {
    <>
      {contextHolder}
      <GptCard
-        className="w-96"
+        className="w-[26rem] max-w-full"
        title={flow.name}
        desc={flow.description}
        tags={[
-          { text: flow.source, border: true, color: flow.source === 'DBGPT-WEB' ? 'green' : 'blue' },
-          { text: flow.editable ? 'Editable' : 'Can not Edit', color: flow.editable ? 'green' : 'gray' },
+          { text: flow.source, color: flow.source === 'DBGPT-WEB' ? 'green' : 'blue', border: true },
+          { text: flow.editable ? 'Editable' : 'Can not Edit', color: flow.editable ? 'green' : 'gray', border: true },
+          {
+            text: (
+              <>
+                {flow.error_message ? (
+                  <Tooltip placement="bottom" title={flow.error_message}>
+                    {flow.state}
+                    <ExclamationCircleOutlined className="ml-1" />
+                  </Tooltip>
+                ) : (
+                  flow.state
+                )}
+              </>
+            ),
+            color: flow.state === 'load_failed' ? 'red' : flow.state === 'running' ? 'green' : 'blue',
+            border: true,
+          },
        ]}
        operations={[
          {
@@ -81,6 +106,13 @@ const FlowCard: React.FC<FlowCardProps> = ({ flow, deleteCallback }) => {
            children: <EditFilled />,
            onClick: cardClick,
          },
+          {
+            label: t('Copy'),
+            children: <CopyFilled />,
+            onClick: () => {
+              onCopy(flow);
+            },
+          },
          {
            label: t('Delete'),
            children: <DeleteFilled />,
@@ -88,7 +120,7 @@ const FlowCard: React.FC<FlowCardProps> = ({ flow, deleteCallback }) => {
          },
        ]}
      >
-        <div className="w-full h-[150px] shadow-[inset_0_0_16px_rgba(50,50,50,.05)]">
+        <div className="w-full h-40 shadow-[inset_0_0_16px_rgba(50,50,50,.05)]">
          <FlowPreview flowData={flow.flow_data} />
        </div>
      </GptCard>
@@ -24,7 +24,7 @@ const edgeTypes = { buttonedge: ButtonEdge };
const Canvas: React.FC<Props> = () => {
  const { t } = useTranslation();
  const [messageApi, contextHolder] = message.useMessage();
-  const [form] = Form.useForm();
+  const [form] = Form.useForm<IFlowUpdateParam>();
  const searchParams = useSearchParams();
  const id = searchParams?.get('id') || '';
  const reactFlow = useReactFlow();
@@ -35,6 +35,7 @@ const Canvas: React.FC<Props> = () => {
  const reactFlowWrapper = useRef<HTMLDivElement>(null);
  const [isModalVisible, setIsModalVisible] = useState(false);
  const [flowInfo, setFlowInfo] = useState<IFlowUpdateParam>();
+  const [deploy, setDeploy] = useState(true);

  async function getFlowData() {
    setLoading(true);
@@ -175,10 +176,11 @@ const Canvas: React.FC<Props> = () => {
  }

  async function handleSaveFlow() {
-    const { name, label, description = '', editable = false } = form.getFieldsValue();
+    const { name, label, description = '', editable = false, state = 'deployed' } = form.getFieldsValue();
+    console.log(form.getFieldsValue());
    const reactFlowObject = mapHumpToUnderline(reactFlow.toObject() as IFlowData);
    if (id) {
-      const [, , res] = await apiInterceptors(updateFlowById(id, { name, label, description, editable, uid: id, flow_data: reactFlowObject }));
+      const [, , res] = await apiInterceptors(updateFlowById(id, { name, label, description, editable, uid: id, flow_data: reactFlowObject, state }));
      setIsModalVisible(false);
      if (res?.success) {
        messageApi.success(t('save_flow_success'));
@@ -186,7 +188,7 @@ const Canvas: React.FC<Props> = () => {
        messageApi.error(res?.err_msg);
      }
    } else {
-      const [_, res] = await apiInterceptors(addFlow({ name, label, description, editable, flow_data: reactFlowObject }));
+      const [_, res] = await apiInterceptors(addFlow({ name, label, description, editable, flow_data: reactFlowObject, state }));
      if (res?.uid) {
        messageApi.success(t('save_flow_success'));
        const history = window.history;
@@ -271,7 +273,21 @@ const Canvas: React.FC<Props> = () => {
          <TextArea rows={3} />
        </Form.Item>
        <Form.Item label="Editable" name="editable" initialValue={flowInfo?.editable} valuePropName="checked">
-          <Checkbox></Checkbox>
+          <Checkbox />
        </Form.Item>
+        <Form.Item hidden name="state">
+          <Input />
+        </Form.Item>
+        <Form.Item label="Deploy">
+          <Checkbox
+            defaultChecked={flowInfo?.state === 'deployed' || flowInfo?.state === 'running'}
+            value={deploy}
+            onChange={(e) => {
+              const val = e.target.checked;
+              form.setFieldValue('state', val ? 'deployed' : 'developing');
+              setDeploy(val);
+            }}
+          />
+        </Form.Item>
        <Form.Item wrapperCol={{ offset: 8, span: 16 }}>
          <Space>
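One detail worth noting in the Canvas changes above: the Deploy checkbox never submits its own value. It writes 'deployed' or 'developing' into a hidden state field, which is how handleSaveFlow later picks state up from form.getFieldsValue(). A condensed, self-contained sketch of that pattern (the component name is illustrative; antd's setFieldValue API as used in this diff):

    import { Checkbox, Form, Input } from 'antd';
    import React from 'react';

    // Illustrative: the checkbox drives a hidden 'state' field so the form
    // submits the state alongside name/label/description.
    const DeployField: React.FC = () => {
      const [form] = Form.useForm<{ state?: string }>();
      return (
        <Form form={form}>
          <Form.Item hidden name="state">
            <Input />
          </Form.Item>
          <Form.Item label="Deploy">
            <Checkbox onChange={(e) => form.setFieldValue('state', e.target.checked ? 'deployed' : 'developing')} />
          </Form.Item>
        </Form>
      );
    };

    export default DeployField;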
@@ -1,16 +1,28 @@
-import { apiInterceptors, getFlows } from '@/client/api';
+import { addFlow, apiInterceptors, getFlows } from '@/client/api';
import MyEmpty from '@/components/common/MyEmpty';
import MuiLoading from '@/components/common/loading';
import FlowCard from '@/components/flow/flow-card';
-import { IFlow } from '@/types/flow';
+import { IFlow, IFlowUpdateParam } from '@/types/flow';
import { PlusOutlined } from '@ant-design/icons';
-import { Button } from 'antd';
+import { Button, Checkbox, Form, Input, Modal, message } from 'antd';
import Link from 'next/link';
-import React, { useEffect, useState } from 'react';
+import React, { useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';

+type FormFields = Pick<IFlow, 'label' | 'name'>;
+
function Flow() {
  const { t } = useTranslation();

+  const [showModal, setShowModal] = useState(false);
  const [loading, setLoading] = useState(false);
  const [flowList, setFlowList] = useState<Array<IFlow>>([]);
+  const [deploy, setDeploy] = useState(false);
+
+  const [messageApi, contextHolder] = message.useMessage();
+  const [form] = Form.useForm<Pick<IFlow, 'label' | 'name'>>();
+
+  const copyFlowTemp = useRef<IFlow>();

  async function getFlowList() {
    setLoading(true);
@@ -27,8 +39,34 @@ function Flow() {
    setFlowList((flows) => flows.filter((flow) => flow.uid !== uid));
  }

+  const handleCopy = (flow: IFlow) => {
+    copyFlowTemp.current = flow;
+    form.setFieldValue('label', `${flow.label} Copy`);
+    form.setFieldValue('name', `${flow.name}_copy`);
+    setDeploy(false);
+    setShowModal(true);
+  };
+
+  const onFinish = async (val: { name: string; label: string }) => {
+    if (!copyFlowTemp.current) return;
+    const { source, uid, dag_id, gmt_created, gmt_modified, state, ...params } = copyFlowTemp.current;
+    const data: IFlowUpdateParam = {
+      ...params,
+      editable: true,
+      state: deploy ? 'deployed' : 'developing',
+      ...val,
+    };
+    const [err] = await apiInterceptors(addFlow(data));
+    if (!err) {
+      messageApi.success(t('save_flow_success'));
+      setShowModal(false);
+      getFlowList();
+    }
+  };
+
  return (
    <div className="relative p-4 md:p-6 min-h-full overflow-y-auto">
+      {contextHolder}
      <MuiLoading visible={loading} />
      <div className="mb-4">
        <Link href="/flow/canvas">
@@ -39,10 +77,41 @@ function Flow() {
      </div>
      <div className="flex flex-wrap gap-2 md:gap-4 justify-start items-stretch">
        {flowList.map((flow) => (
-          <FlowCard key={flow.uid} flow={flow} deleteCallback={updateFlowList} />
+          <FlowCard key={flow.uid} flow={flow} deleteCallback={updateFlowList} onCopy={handleCopy} />
        ))}
        {flowList.length === 0 && <MyEmpty description="No flow found" />}
      </div>
+      <Modal
+        open={showModal}
+        title="Copy AWEL Flow"
+        onCancel={() => {
+          setShowModal(false);
+        }}
+        footer={false}
+      >
+        <Form form={form} onFinish={onFinish} className="mt-6">
+          <Form.Item name="name" label="Name" rules={[{ required: true }]}>
+            <Input />
+          </Form.Item>
+          <Form.Item name="label" label="Label" rules={[{ required: true }]}>
+            <Input />
+          </Form.Item>
+          <Form.Item label="Deploy">
+            <Checkbox
+              value={deploy}
+              onChange={(e) => {
+                const val = e.target.checked;
+                setDeploy(val);
+              }}
+            />
+          </Form.Item>
+          <div className="flex justify-end">
+            <Button type="primary" htmlType="submit">
+              {t('Submit')}
+            </Button>
+          </div>
+        </Form>
+      </Modal>
    </div>
  );
}
@@ -1,5 +1,7 @@
import { Node } from 'reactflow';

+export type FlowState = 'deployed' | 'developing' | 'initializing' | 'testing' | 'disabled' | 'running' | 'load_failed';
+
export type IFlowUpdateParam = {
  name: string;
  label: string;
@@ -7,9 +9,13 @@ export type IFlowUpdateParam = {
  description: string;
  uid?: string;
  flow_data?: IFlowData;
+  state?: FlowState;
};

export type IFlow = {
+  dag_id: string;
+  gmt_created: string;
+  gmt_modified: string;
  uid: string;
  name: string;
  label: string;
@@ -17,6 +23,8 @@ export type IFlow = {
  description: string;
  flow_data: IFlowData;
  source: string;
+  state?: FlowState;
+  error_message?: string;
};

export type IFlowResponse = {
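FlowState is a closed union, which lets UI code branch on it exhaustively. For instance, the tag-color rule FlowCard applies above can be factored out as follows (stateColor is an illustrative helper, not part of this commit):

    import { FlowState } from '@/types/flow';

    // Mirrors the FlowCard rule: load_failed -> red, running -> green, anything else -> blue.
    function stateColor(state?: FlowState): 'red' | 'green' | 'blue' {
      if (state === 'load_failed') return 'red';
      if (state === 'running') return 'green';
      return 'blue';
    }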