diff --git a/dbgpt/agent/core/memory/hybrid.py b/dbgpt/agent/core/memory/hybrid.py
index 0d7add35e..7316ca39c 100644
--- a/dbgpt/agent/core/memory/hybrid.py
+++ b/dbgpt/agent/core/memory/hybrid.py
@@ -8,9 +8,10 @@ consolidates important information over time.
import os.path
from concurrent.futures import Executor, ThreadPoolExecutor
from datetime import datetime
-from typing import TYPE_CHECKING, Generic, List, Optional, Tuple, Type
+from typing import Generic, List, Optional, Tuple, Type
from dbgpt.core import Embeddings, LLMClient
+from dbgpt.storage.vector_store.base import VectorStoreBase
from dbgpt.util.annotations import immutable, mutable
from .base import (
@@ -26,9 +27,6 @@ from .base import (
from .long_term import LongTermMemory
from .short_term import EnhancedShortTermMemory
-if TYPE_CHECKING:
- from dbgpt.storage.vector_store.connector import VectorStoreConnector
-
class HybridMemory(Memory, Generic[T]):
"""Hybrid memory for the agent."""
@@ -81,8 +79,10 @@ class HybridMemory(Memory, Generic[T]):
):
"""Create a hybrid memory from Chroma vector store."""
from dbgpt.configs.model_config import DATA_DIR
- from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
- from dbgpt.storage.vector_store.connector import VectorStoreConnector
+ from dbgpt.storage.vector_store.chroma_store import (
+ ChromaStore,
+ ChromaVectorConfig,
+ )
if not embeddings:
from dbgpt.rag.embedding import DefaultEmbeddingFactory
@@ -91,16 +91,15 @@ class HybridMemory(Memory, Generic[T]):
vstore_path = vstore_path or os.path.join(DATA_DIR, "agent_memory")
- vector_store_connector = VectorStoreConnector.from_default(
- vector_store_type="Chroma",
- embedding_fn=embeddings,
- vector_store_config=ChromaVectorConfig(
+ vector_store = ChromaStore(
+ ChromaVectorConfig(
name=vstore_name,
persist_path=vstore_path,
- ),
+ embedding_fn=embeddings,
+ )
)
return cls.from_vstore(
- vector_store_connector=vector_store_connector,
+ vector_store=vector_store,
embeddings=embeddings,
executor=executor,
now=now,
@@ -113,7 +112,7 @@ class HybridMemory(Memory, Generic[T]):
@classmethod
def from_vstore(
cls,
- vector_store_connector: "VectorStoreConnector",
+ vector_store: "VectorStoreBase",
embeddings: Optional[Embeddings] = None,
executor: Optional[Executor] = None,
now: Optional[datetime] = None,
@@ -124,7 +123,7 @@ class HybridMemory(Memory, Generic[T]):
):
"""Create a hybrid memory from vector store."""
if not embeddings:
- embeddings = vector_store_connector.current_embeddings
+ raise ValueError("embeddings is required.")
if not executor:
executor = ThreadPoolExecutor()
if not now:
@@ -139,7 +138,7 @@ class HybridMemory(Memory, Generic[T]):
if not long_term_memory:
long_term_memory = LongTermMemory(
executor,
- vector_store_connector,
+ vector_store,
now=now,
)
return cls(now, sensory_memory, short_term_memory, long_term_memory, **kwargs)
diff --git a/dbgpt/agent/core/memory/long_term.py b/dbgpt/agent/core/memory/long_term.py
index b479f86df..441ce2b2d 100644
--- a/dbgpt/agent/core/memory/long_term.py
+++ b/dbgpt/agent/core/memory/long_term.py
@@ -6,7 +6,7 @@ from typing import Generic, List, Optional
from dbgpt.core import Chunk
from dbgpt.rag.retriever.time_weighted import TimeWeightedEmbeddingRetriever
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.base import VectorStoreBase
from dbgpt.storage.vector_store.filters import MetadataFilters
from dbgpt.util.annotations import immutable, mutable
from dbgpt.util.executor_utils import blocking_func_to_async
@@ -70,7 +70,7 @@ class LongTermMemory(Memory, Generic[T]):
def __init__(
self,
executor: Executor,
- vector_store_connector: VectorStoreConnector,
+ vector_store: VectorStoreBase,
now: Optional[datetime] = None,
reflection_threshold: Optional[float] = None,
):
@@ -81,9 +81,9 @@ class LongTermMemory(Memory, Generic[T]):
self.forgetting: bool = False
self.reflection_threshold: Optional[float] = reflection_threshold
self.aggregate_importance: float = 0.0
- self._vector_store_connector = vector_store_connector
+ self._vector_store = vector_store
self.memory_retriever = LongTermRetriever(
- now=self.now, vector_store_connector=vector_store_connector
+ now=self.now, index_store=vector_store
)
@immutable
@@ -97,7 +97,7 @@ class LongTermMemory(Memory, Generic[T]):
m: LongTermMemory[T] = LongTermMemory(
now=now,
executor=self.executor,
- vector_store_connector=self._vector_store_connector.new_connector(new_name),
+ vector_store=self._vector_store,
reflection_threshold=self.reflection_threshold,
)
m._copy_from(self)
diff --git a/dbgpt/app/knowledge/api.py b/dbgpt/app/knowledge/api.py
index 7f21188ab..d1d334372 100644
--- a/dbgpt/app/knowledge/api.py
+++ b/dbgpt/app/knowledge/api.py
@@ -33,9 +33,9 @@ from dbgpt.rag.knowledge.base import ChunkStrategy
from dbgpt.rag.knowledge.factory import KnowledgeFactory
from dbgpt.rag.retriever.embedding import EmbeddingRetriever
from dbgpt.serve.rag.api.schemas import KnowledgeSyncRequest
+from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.serve.rag.service.service import Service
from dbgpt.storage.vector_store.base import VectorStoreConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.util.tracer import SpanType, root_tracer
logger = logging.getLogger(__name__)
@@ -310,7 +310,7 @@ def similar_query(space_name: str, query_request: KnowledgeQueryRequest):
vector_store_config=config,
)
retriever = EmbeddingRetriever(
- top_k=query_request.top_k, vector_store_connector=vector_store_connector
+ top_k=query_request.top_k, index_store=vector_store_connector.index_client
)
chunks = retriever.retrieve(query_request.query)
res = [
diff --git a/dbgpt/app/knowledge/service.py b/dbgpt/app/knowledge/service.py
index 266e8405d..b0c8c16e4 100644
--- a/dbgpt/app/knowledge/service.py
+++ b/dbgpt/app/knowledge/service.py
@@ -12,7 +12,6 @@ from dbgpt.app.knowledge.request.request import (
ChunkQueryRequest,
DocumentQueryRequest,
DocumentSummaryRequest,
- DocumentSyncRequest,
KnowledgeDocumentRequest,
KnowledgeSpaceRequest,
SpaceArgumentRequest,
@@ -24,19 +23,18 @@ from dbgpt.app.knowledge.request.response import (
)
from dbgpt.component import ComponentType
from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG
-from dbgpt.core import Chunk, LLMClient
+from dbgpt.core import LLMClient
from dbgpt.model import DefaultLLMClient
from dbgpt.model.cluster import WorkerManagerFactory
-from dbgpt.rag.assembler.embedding import EmbeddingAssembler
from dbgpt.rag.assembler.summary import SummaryAssembler
from dbgpt.rag.chunk_manager import ChunkParameters
from dbgpt.rag.embedding.embedding_factory import EmbeddingFactory
from dbgpt.rag.knowledge.base import KnowledgeType
from dbgpt.rag.knowledge.factory import KnowledgeFactory
+from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.serve.rag.models.models import KnowledgeSpaceDao, KnowledgeSpaceEntity
from dbgpt.serve.rag.service.service import SyncStatus
from dbgpt.storage.vector_store.base import VectorStoreConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.util.executor_utils import ExecutorFactory, blocking_func_to_async
from dbgpt.util.tracer import root_tracer, trace
diff --git a/dbgpt/app/scene/chat_knowledge/v1/chat.py b/dbgpt/app/scene/chat_knowledge/v1/chat.py
index 2cfef3eed..ccc7bf32e 100644
--- a/dbgpt/app/scene/chat_knowledge/v1/chat.py
+++ b/dbgpt/app/scene/chat_knowledge/v1/chat.py
@@ -69,7 +69,7 @@ class ChatKnowledge(BaseChat):
"embedding_factory", EmbeddingFactory
)
from dbgpt.rag.retriever.embedding import EmbeddingRetriever
- from dbgpt.storage.vector_store.connector import VectorStoreConnector
+ from dbgpt.serve.rag.connector import VectorStoreConnector
embedding_fn = embedding_factory.create(
model_name=EMBEDDING_MODEL_CONFIG[CFG.EMBEDDING_MODEL]
@@ -116,7 +116,7 @@ class ChatKnowledge(BaseChat):
retriever_top_k = max(CFG.RERANK_TOP_K, 20)
self.embedding_retriever = EmbeddingRetriever(
top_k=retriever_top_k,
- vector_store_connector=vector_store_connector,
+ index_store=vector_store_connector.index_client,
query_rewrite=query_rewrite,
rerank=reranker,
)
diff --git a/dbgpt/app/static/404.html b/dbgpt/app/static/404.html
index 5758947d7..007b5434c 100644
--- a/dbgpt/app/static/404.html
+++ b/dbgpt/app/static/404.html
@@ -1 +1 @@
-
404: This page could not be found 404
This page could not be found.
\ No newline at end of file
+404: This page could not be found 404
This page could not be found.
\ No newline at end of file
diff --git a/dbgpt/app/static/404/index.html b/dbgpt/app/static/404/index.html
index 5758947d7..007b5434c 100644
--- a/dbgpt/app/static/404/index.html
+++ b/dbgpt/app/static/404/index.html
@@ -1 +1 @@
-404: This page could not be found 404
This page could not be found.
\ No newline at end of file
+404: This page could not be found 404
This page could not be found.
\ No newline at end of file
diff --git a/dbgpt/app/static/_next/static/8e7nQm3nKO3WF7o-A_evd/_buildManifest.js b/dbgpt/app/static/_next/static/Q_H6GwK11oUGrF3tq9O8x/_buildManifest.js
similarity index 65%
rename from dbgpt/app/static/_next/static/8e7nQm3nKO3WF7o-A_evd/_buildManifest.js
rename to dbgpt/app/static/_next/static/Q_H6GwK11oUGrF3tq9O8x/_buildManifest.js
index 2c4bc0334..81d76bc06 100644
--- a/dbgpt/app/static/_next/static/8e7nQm3nKO3WF7o-A_evd/_buildManifest.js
+++ b/dbgpt/app/static/_next/static/Q_H6GwK11oUGrF3tq9O8x/_buildManifest.js
@@ -1 +1 @@
-self.__BUILD_MANIFEST=function(s,a,c,e,t,n,d,f,k,h,i,u,b,j,p,g,o,l,r,_){return{__rewrites:{beforeFiles:[],afterFiles:[],fallback:[]},"/":[p,s,a,e,c,d,h,f,g,"static/chunks/9305-f44429d5185a9fc7.js","static/chunks/1353-705aa47cc2b94999.js","static/chunks/pages/index-9d77aed53ca78d15.js"],"/_error":["static/chunks/pages/_error-8095ba9e1bf12f30.js"],"/agent":[s,a,c,t,h,n,"static/chunks/pages/agent-2be7990da37f5165.js"],"/app":[i,s,a,e,c,t,n,u,b,o,j,"static/chunks/pages/app-8154f6fcced2f743.js"],"/chat":["static/chunks/pages/chat-5794854c0948b84c.js"],"/database":[s,a,e,c,t,n,f,k,"static/chunks/3718-e111d727d432bdd2.js","static/chunks/pages/database-7384ab94b08f23ff.js"],"/flow":[i,s,a,c,u,b,o,j,"static/chunks/pages/flow-33fe9f396642fb4c.js"],"/flow/canvas":[p,i,s,a,e,c,d,f,u,k,b,l,g,"static/chunks/1425-6e94ae18b1ac5a70.js",j,"static/chunks/pages/flow/canvas-644b6ee718585173.js"],"/knowledge":[r,s,a,e,c,t,h,n,f,k,_,l,"static/chunks/5237-1d36a3742424b75e.js","static/chunks/pages/knowledge-244aee7ebbad3668.js"],"/knowledge/chunk":[s,e,t,d,n,"static/chunks/pages/knowledge/chunk-625a32aed5f380e2.js"],"/knowledge/graph":["static/chunks/90912e1b-ed32608ee46ab40f.js","static/chunks/193-5e83ce3fd4f165ef.js","static/chunks/pages/knowledge/graph-9fb1ec6bf06d5108.js"],"/models":[r,s,a,e,c,k,"static/chunks/3444-30181eacc7980e66.js","static/chunks/pages/models-446238c56e41aa1b.js"],"/prompt":[s,a,e,c,d,_,"static/chunks/7184-3ca3f58327a6986a.js","static/chunks/7869-1a99e25b182b3eaa.js","static/chunks/pages/prompt-c44ac718b4d637c9.js"],sortedPages:["/","/_app","/_error","/agent","/app","/chat","/database","/flow","/flow/canvas","/knowledge","/knowledge/chunk","/knowledge/graph","/models","/prompt"]}}("static/chunks/2185-30f9d0578fa0d631.js","static/chunks/5503-c65f6d730754acc7.js","static/chunks/9479-21f588e1fd4e6b6d.js","static/chunks/1009-4b2af86bde623424.js","static/chunks/785-c3544abc036fc97d.js","static/chunks/5813-c6244a8eba7ef4ae.js","static/chunks/1647-8683da4db89d68c1.js","st
atic/chunks/411-b5d3e7f64bee2335.js","static/chunks/8928-0e78def492052d13.js","static/chunks/4553-61740188e6a650a8.js","static/chunks/971df74e-7436ff4085ebb785.js","static/chunks/7434-29506257e67e8077.js","static/chunks/9924-5bce555f07385e1f.js","static/css/b4846eed11c4725f.css","static/chunks/29107295-75edf0bf34e24b1e.js","static/chunks/2487-cda9d2a2fd712a15.js","static/chunks/6165-93d23bc520382b2c.js","static/chunks/2282-96412afca1591c9a.js","static/chunks/75fc9c18-1d6133135d3d283c.js","static/chunks/5733-7ef320ab0f876a5e.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
\ No newline at end of file
+self.__BUILD_MANIFEST=function(s,c,a,e,t,n,f,d,k,h,i,u,b,j,p,g,o,l,r,_){return{__rewrites:{beforeFiles:[],afterFiles:[],fallback:[]},"/":[p,s,c,e,a,f,h,d,g,"static/chunks/9305-f44429d5185a9fc7.js","static/chunks/1353-705aa47cc2b94999.js","static/chunks/pages/index-0b2d61c1c6358f20.js"],"/_error":["static/chunks/pages/_error-8095ba9e1bf12f30.js"],"/agent":[s,c,a,t,h,n,"static/chunks/pages/agent-2be7990da37f5165.js"],"/app":[i,s,c,e,a,t,n,u,b,o,j,"static/chunks/pages/app-8154f6fcced2f743.js"],"/chat":["static/chunks/pages/chat-5794854c0948b84c.js"],"/database":[s,c,e,a,t,n,d,k,"static/chunks/3718-e111d727d432bdd2.js","static/chunks/pages/database-7384ab94b08f23ff.js"],"/flow":[i,s,c,a,u,b,o,j,"static/chunks/pages/flow-33fe9f396642fb4c.js"],"/flow/canvas":[p,i,s,c,e,a,f,d,u,k,b,l,g,"static/chunks/1425-6e94ae18b1ac5a70.js",j,"static/chunks/pages/flow/canvas-644b6ee718585173.js"],"/knowledge":[r,s,c,e,a,t,h,n,d,k,_,l,"static/chunks/5237-1d36a3742424b75e.js","static/chunks/pages/knowledge-223d50e9531bd961.js"],"/knowledge/chunk":[s,e,t,f,n,"static/chunks/pages/knowledge/chunk-625a32aed5f380e2.js"],"/knowledge/graph":["static/chunks/90912e1b-ed32608ee46ab40f.js","static/chunks/193-5e83ce3fd4f165ef.js","static/chunks/pages/knowledge/graph-9fb1ec6bf06d5108.js"],"/models":[r,s,c,e,a,k,"static/chunks/3444-30181eacc7980e66.js","static/chunks/pages/models-446238c56e41aa1b.js"],"/prompt":[s,c,e,a,f,_,"static/chunks/7184-3ca3f58327a6986a.js","static/chunks/7869-1a99e25b182b3eaa.js","static/chunks/pages/prompt-c44ac718b4d637c9.js"],sortedPages:["/","/_app","/_error","/agent","/app","/chat","/database","/flow","/flow/canvas","/knowledge","/knowledge/chunk","/knowledge/graph","/models","/prompt"]}}("static/chunks/2185-30f9d0578fa0d631.js","static/chunks/5503-c65f6d730754acc7.js","static/chunks/9479-21f588e1fd4e6b6d.js","static/chunks/1009-4b2af86bde623424.js","static/chunks/785-c3544abc036fc97d.js","static/chunks/5813-c6244a8eba7ef4ae.js","static/chunks/1647-8683da4db89d68c1.js","st
atic/chunks/411-b5d3e7f64bee2335.js","static/chunks/8928-0e78def492052d13.js","static/chunks/4553-61740188e6a650a8.js","static/chunks/971df74e-7436ff4085ebb785.js","static/chunks/7434-29506257e67e8077.js","static/chunks/9924-5bce555f07385e1f.js","static/css/b4846eed11c4725f.css","static/chunks/29107295-75edf0bf34e24b1e.js","static/chunks/2487-cda9d2a2fd712a15.js","static/chunks/6165-93d23bc520382b2c.js","static/chunks/2282-96412afca1591c9a.js","static/chunks/75fc9c18-1d6133135d3d283c.js","static/chunks/5733-7ef320ab0f876a5e.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
\ No newline at end of file
diff --git a/dbgpt/app/static/_next/static/8e7nQm3nKO3WF7o-A_evd/_ssgManifest.js b/dbgpt/app/static/_next/static/Q_H6GwK11oUGrF3tq9O8x/_ssgManifest.js
similarity index 100%
rename from dbgpt/app/static/_next/static/8e7nQm3nKO3WF7o-A_evd/_ssgManifest.js
rename to dbgpt/app/static/_next/static/Q_H6GwK11oUGrF3tq9O8x/_ssgManifest.js
diff --git a/dbgpt/app/static/_next/static/chunks/4134.1f69d7fdee641af7.js b/dbgpt/app/static/_next/static/chunks/4134.d59cf294103a4db2.js
similarity index 87%
rename from dbgpt/app/static/_next/static/chunks/4134.1f69d7fdee641af7.js
rename to dbgpt/app/static/_next/static/chunks/4134.d59cf294103a4db2.js
index 6cd94a020..c67f73a44 100644
--- a/dbgpt/app/static/_next/static/chunks/4134.1f69d7fdee641af7.js
+++ b/dbgpt/app/static/_next/static/chunks/4134.d59cf294103a4db2.js
@@ -1 +1 @@
-"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[4134],{12545:function(e,t,l){l.r(t),l.d(t,{default:function(){return eF}});var a=l(85893),s=l(67294),r=l(2093),n=l(43446),o=l(39332),c=l(74434),i=l(24019),d=l(50888),u=l(97937),m=l(63606),x=l(50228),h=l(87547),p=l(89035),g=l(92975),v=l(12767),f=l(94184),j=l.n(f),b=l(66309),y=l(81799),w=l(41468),N=l(29158),_=l(98165),Z=l(14079),k=l(38426),C=l(45396),S=l(44442),P=l(55241),E=l(39156),R=l(71577),D=l(2453),O=l(57132),I=l(36096),M=l(79166),q=l(93179),A=l(20640),J=l.n(A);function L(e){let{code:t,light:l,dark:r,language:n,customStyle:o}=e,{mode:c}=(0,s.useContext)(w.p);return(0,a.jsxs)("div",{className:"relative",children:[(0,a.jsx)(R.ZP,{className:"absolute right-3 top-2 text-gray-300 hover:!text-gray-200 bg-gray-700",type:"text",icon:(0,a.jsx)(O.Z,{}),onClick:()=>{let e=J()(t);D.ZP[e?"success":"error"](e?"Copy success":"Copy failed")}}),(0,a.jsx)(q.Z,{customStyle:o,language:n,style:"dark"===c?null!=r?r:I.Z:null!=l?l:M.Z,children:t})]})}var F=l(14313),z=l(47221),T=function(e){let{data:t}=e;return t&&t.length?(0,a.jsx)(z.Z,{bordered:!0,className:"my-3",expandIcon:e=>{let{isActive:t}=e;return(0,a.jsx)(F.Z,{rotate:t?90:0})},items:t.map((e,t)=>({key:t,label:(0,a.jsxs)("div",{className:"whitespace-normal",children:[(0,a.jsxs)("span",{children:[e.name," - ",e.agent]}),"complete"===e.status?(0,a.jsx)(m.Z,{className:"!text-green-500 ml-2"}):(0,a.jsx)(i.Z,{className:"!text-gray-500 ml-2"})]}),children:(0,a.jsx)(g.D,{components:en,children:e.markdown})}))}):null},G=l(32198),$=function(e){let{data:t}=e;return t&&t.length?(0,a.jsx)(a.Fragment,{children:t.map((e,t)=>(0,a.jsxs)("div",{className:"rounded my-4 md:my-6",children:[(0,a.jsxs)("div",{className:"flex items-center mb-3 text-sm",children:[e.model?(0,y.A)(e.model):(0,a.jsx)("div",{className:"rounded-full w-6 h-6 bg-gray-100"}),(0,a.jsxs)("div",{className:"ml-2 opacity-70",children:[e.sender,(0,a.jsx)(G.Z,{className:"mx-2 
text-base"}),e.receiver]})]}),(0,a.jsx)("div",{className:"whitespace-normal text-sm",children:(0,a.jsx)(g.D,{components:en,children:e.markdown})})]},t))}):null},V=l(62418),H=function(e){let{data:t}=e;return(0,a.jsxs)("div",{className:"rounded overflow-hidden",children:[(0,a.jsx)("div",{className:"p-3 text-white bg-red-500 whitespace-normal",children:t.display_type}),(0,a.jsxs)("div",{className:"p-3 bg-red-50",children:[(0,a.jsx)("div",{className:"mb-2 whitespace-normal",children:t.thought}),(0,a.jsx)(L,{code:(0,V._m)(t.sql),language:"sql"})]})]})},U=l(8497),B=function(e){var t;let{data:l,type:s,sql:r}=e,n=(null==l?void 0:l[0])?null===(t=Object.keys(null==l?void 0:l[0]))||void 0===t?void 0:t.map(e=>({title:e,dataIndex:e,key:e})):[],o={key:"chart",label:"Chart",children:(0,a.jsx)(U._,{data:l,chartType:(0,U.a)(s)})},c={key:"sql",label:"SQL",children:(0,a.jsx)(L,{language:"sql",code:(0,V._m)(r)})},i={key:"data",label:"Data",children:(0,a.jsx)(C.Z,{dataSource:l,columns:n,scroll:{x:"auto"}})},d="response_table"===s?[i,c]:[o,c,i];return(0,a.jsx)(S.Z,{defaultActiveKey:"response_table"===s?"data":"chart",items:d,size:"small"})},Q=function(e){let{data:t}=e;return(0,a.jsx)(B,{data:t.data,type:t.type,sql:t.sql})};let W=[[2],[1,2],[1,3],[2,1,2],[2,1,3],[3,1,3],[3,2,3]];var K=function(e){let{data:t}=e,l=(0,s.useMemo)(()=>{if(t.chart_count>1){let e=W[t.chart_count-2],l=0;return e.map(e=>{let a=t.data.slice(l,l+e);return l=e,a})}return[t.data]},[t.data,t.chart_count]);return(0,a.jsx)("div",{className:"flex flex-col gap-3",children:l.map((e,t)=>(0,a.jsx)("div",{className:"flex gap-3",children:e.map((e,t)=>(0,a.jsxs)("div",{className:"flex flex-1 flex-col justify-between p-4 rounded border border-gray-200 dark:border-gray-500 whitespace-normal",children:[(0,a.jsxs)("div",{children:[e.title&&(0,a.jsx)("div",{className:"mb-2 text-lg",children:e.title}),e.describe&&(0,a.jsx)("div",{className:"mb-4 text-sm 
text-gray-500",children:e.describe})]}),(0,a.jsx)(E._z,{data:e.data,chartType:(0,E.aG)(e.type)})]},"chart-".concat(t)))},"row-".concat(t)))})};let X={todo:{bgClass:"bg-gray-500",icon:(0,a.jsx)(i.Z,{className:"ml-2"})},runing:{bgClass:"bg-blue-500",icon:(0,a.jsx)(d.Z,{className:"ml-2"})},failed:{bgClass:"bg-red-500",icon:(0,a.jsx)(u.Z,{className:"ml-2"})},complete:{bgClass:"bg-green-500",icon:(0,a.jsx)(m.Z,{className:"ml-2"})}};var Y=function(e){var t,l;let{data:s}=e,{bgClass:r,icon:n}=null!==(t=X[s.status])&&void 0!==t?t:{};return(0,a.jsxs)("div",{className:"bg-theme-light dark:bg-theme-dark-container rounded overflow-hidden my-2 flex flex-col lg:max-w-[80%]",children:[(0,a.jsxs)("div",{className:j()("flex px-4 md:px-6 py-2 items-center text-white text-sm",r),children:[s.name,n]}),s.result?(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm whitespace-normal",children:(0,a.jsx)(g.D,{components:en,rehypePlugins:[v.Z],children:null!==(l=s.result)&&void 0!==l?l:""})}):(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm",children:s.err_msg})]})},ee=l(76199),et=l(67421),el=l(24136),ea=function(e){let{data:t}=e,{t:l}=(0,et.$G)(),[r,n]=(0,s.useState)(0);return(0,a.jsxs)("div",{className:"bg-[#EAEAEB] rounded overflow-hidden border border-theme-primary dark:bg-theme-dark text-sm",children:[(0,a.jsxs)("div",{children:[(0,a.jsx)("div",{className:"flex",children:t.code.map((e,t)=>(0,a.jsxs)("div",{className:j()("px-4 py-2 text-[#121417] dark:text-white cursor-pointer",{"bg-white dark:bg-theme-dark-container":t===r}),onClick:()=>{n(t)},children:["CODE ",t+1,": ",e[0]]},t))}),t.code.length&&(0,a.jsx)(L,{language:t.code[r][0],code:t.code[r][1],customStyle:{maxHeight:300,margin:0},light:el.Z,dark:M.Z})]}),(0,a.jsxs)("div",{children:[(0,a.jsx)("div",{className:"flex",children:(0,a.jsxs)("div",{className:"bg-white dark:bg-theme-dark-container px-4 py-2 text-[#121417] dark:text-white",children:[l("Terminal")," 
",t.exit_success?(0,a.jsx)(m.Z,{className:"text-green-600"}):(0,a.jsx)(u.Z,{className:"text-red-600"})]})}),(0,a.jsx)("div",{className:"p-4 max-h-72 overflow-y-auto whitespace-normal bg-white dark:dark:bg-theme-dark",children:(0,a.jsx)(g.D,{components:en,remarkPlugins:[ee.Z],children:t.log})})]})]})};let es=["custom-view","chart-view","references","summary"],er={code(e){let{inline:t,node:l,className:s,children:r,style:n,...o}=e,c=String(r),{context:i,matchValues:d}=function(e){let t=es.reduce((t,l)=>{let a=RegExp("<".concat(l,"[^>]*/?>"),"gi");return e=e.replace(a,e=>(t.push(e),"")),t},[]);return{context:e,matchValues:t}}(c),u=(null==s?void 0:s.replace("language-",""))||"javascript";if("agent-plans"===u)try{let e=JSON.parse(c);return(0,a.jsx)(T,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("agent-messages"===u)try{let e=JSON.parse(c);return(0,a.jsx)($,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-convert-error"===u)try{let e=JSON.parse(c);return(0,a.jsx)(H,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-dashboard"===u)try{let e=JSON.parse(c);return(0,a.jsx)(K,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-chart"===u)try{let e=JSON.parse(c);return(0,a.jsx)(Q,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-plugin"===u)try{let e=JSON.parse(c);return(0,a.jsx)(Y,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-code"===u)try{let e=JSON.parse(c);return(0,a.jsx)(ea,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}return(0,a.jsxs)(a.Fragment,{children:[t?(0,a.jsx)("code",{...o,style:n,className:"p-1 mx-1 rounded bg-theme-light dark:bg-theme-dark 
text-sm",children:r}):(0,a.jsx)(L,{code:i,language:u}),(0,a.jsx)(g.D,{components:er,rehypePlugins:[v.Z],children:d.join("\n")})]})},ul(e){let{children:t}=e;return(0,a.jsx)("ul",{className:"py-1",children:t})},ol(e){let{children:t}=e;return(0,a.jsx)("ol",{className:"py-1",children:t})},li(e){let{children:t,ordered:l}=e;return(0,a.jsx)("li",{className:"text-sm leading-7 ml-5 pl-2 text-gray-600 dark:text-gray-300 ".concat(l?"list-decimal":"list-disc"),children:t})},table(e){let{children:t}=e;return(0,a.jsx)("table",{className:"my-2 rounded-tl-md rounded-tr-md max-w-full bg-white dark:bg-gray-800 text-sm rounded-lg overflow-hidden",children:t})},thead(e){let{children:t}=e;return(0,a.jsx)("thead",{className:"bg-[#fafafa] dark:bg-black font-semibold",children:t})},th(e){let{children:t}=e;return(0,a.jsx)("th",{className:"!text-left p-4",children:t})},td(e){let{children:t}=e;return(0,a.jsx)("td",{className:"p-4 border-t border-[#f0f0f0] dark:border-gray-700",children:t})},h1(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-2xl font-bold my-4 border-b border-slate-300 pb-4",children:t})},h2(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-xl font-bold my-3",children:t})},h3(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-lg font-semibold my-2",children:t})},h4(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-base font-semibold my-1",children:t})},a(e){let{children:t,href:l}=e;return(0,a.jsxs)("div",{className:"inline-block text-blue-600 dark:text-blue-400",children:[(0,a.jsx)(N.Z,{className:"mr-1"}),(0,a.jsx)("a",{href:l,target:"_blank",children:t})]})},img(e){let{src:t,alt:l}=e;return(0,a.jsx)("div",{children:(0,a.jsx)(k.Z,{className:"min-h-[1rem] max-w-full max-h-full border rounded",src:t,alt:l,placeholder:(0,a.jsx)(b.Z,{icon:(0,a.jsx)(_.Z,{spin:!0}),color:"processing",children:"Image Loading..."}),fallback:"/images/fallback.png"})})},blockquote(e){let{children:t}=e;return(0,a.jsx)("blockquote",{className:"py-4 px-6 border-l-4 
border-blue-600 rounded bg-white my-2 text-gray-500 dark:bg-slate-800 dark:text-gray-200 dark:border-white shadow-sm",children:t})},"chart-view":function(e){var t,l,s;let r,{content:n,children:o}=e;try{r=JSON.parse(n)}catch(e){console.log(e,n),r={type:"response_table",sql:"",data:[]}}let c=(null==r?void 0:null===(t=r.data)||void 0===t?void 0:t[0])?null===(l=Object.keys(null==r?void 0:null===(s=r.data)||void 0===s?void 0:s[0]))||void 0===l?void 0:l.map(e=>({title:e,dataIndex:e,key:e})):[],i={key:"chart",label:"Chart",children:(0,a.jsx)(E._z,{data:null==r?void 0:r.data,chartType:(0,E.aG)(null==r?void 0:r.type)})},d={key:"sql",label:"SQL",children:(0,a.jsx)(L,{code:(0,V._m)(null==r?void 0:r.sql,"mysql"),language:"sql"})},u={key:"data",label:"Data",children:(0,a.jsx)(C.Z,{dataSource:null==r?void 0:r.data,columns:c})},m=(null==r?void 0:r.type)==="response_table"?[u,d]:[i,d,u];return(0,a.jsxs)("div",{children:[(0,a.jsx)(S.Z,{defaultActiveKey:(null==r?void 0:r.type)==="response_table"?"data":"chart",items:m,size:"small"}),o]})},references:function(e){let t,{title:l,references:s,children:r}=e;if(r)try{l=(t=JSON.parse(r)).title,s=t.references}catch(e){return console.log("parse references failed",e),(0,a.jsx)("p",{className:"text-sm text-red-500",children:"Render Reference Error!"})}else try{s=JSON.parse(s)}catch(e){return console.log("parse references failed",e),(0,a.jsx)("p",{className:"text-sm text-red-500",children:"Render Reference Error!"})}return!s||(null==s?void 0:s.length)<1?null:(0,a.jsxs)("div",{className:"border-t-[1px] border-gray-300 mt-3 py-2",children:[(0,a.jsxs)("p",{className:"text-sm text-gray-500 dark:text-gray-400 mb-2",children:[(0,a.jsx)(N.Z,{className:"mr-2"}),(0,a.jsx)("span",{className:"font-semibold",children:l})]}),s.map((e,t)=>{var l;return(0,a.jsxs)("div",{className:"text-sm font-normal block ml-2 h-6 leading-6 overflow-hidden",children:[(0,a.jsxs)("span",{className:"inline-block w-6",children:["[",t+1,"]"]}),(0,a.jsx)("span",{className:"mr-2 
lg:mr-4 text-blue-400",children:e.name}),null==e?void 0:null===(l=e.chunks)||void 0===l?void 0:l.map((t,l)=>(0,a.jsxs)("span",{children:["object"==typeof t?(0,a.jsx)(P.Z,{content:(0,a.jsxs)("div",{className:"max-w-4xl",children:[(0,a.jsx)("p",{className:"mt-2 font-bold mr-2 border-t border-gray-500 pt-2",children:"Content:"}),(0,a.jsx)("p",{children:(null==t?void 0:t.content)||"No Content"}),(0,a.jsx)("p",{className:"mt-2 font-bold mr-2 border-t border-gray-500 pt-2",children:"MetaData:"}),(0,a.jsx)("p",{children:(null==t?void 0:t.meta_info)||"No MetaData"}),(0,a.jsx)("p",{className:"mt-2 font-bold mr-2 border-t border-gray-500 pt-2",children:"Score:"}),(0,a.jsx)("p",{children:(null==t?void 0:t.recall_score)||""})]}),title:"Chunk Information",children:(0,a.jsx)("span",{className:"cursor-pointer text-blue-500 ml-2",children:null==t?void 0:t.id},"chunk_content_".concat(null==t?void 0:t.id))}):(0,a.jsx)("span",{className:"cursor-pointer text-blue-500 ml-2",children:t},"chunk_id_".concat(t)),l<(null==e?void 0:e.chunks.length)-1&&(0,a.jsx)("span",{children:","},"chunk_comma_".concat(l))]},"chunk_".concat(l)))]},"file_".concat(t))})]})},summary:function(e){let{children:t}=e;return(0,a.jsxs)("div",{children:[(0,a.jsxs)("p",{className:"mb-2",children:[(0,a.jsx)(Z.Z,{className:"mr-2"}),(0,a.jsx)("span",{className:"font-semibold",children:"Document Summary"})]}),(0,a.jsx)("div",{children:t})]})}};var en=er;let eo={todo:{bgClass:"bg-gray-500",icon:(0,a.jsx)(i.Z,{className:"ml-2"})},runing:{bgClass:"bg-blue-500",icon:(0,a.jsx)(d.Z,{className:"ml-2"})},failed:{bgClass:"bg-red-500",icon:(0,a.jsx)(u.Z,{className:"ml-2"})},completed:{bgClass:"bg-green-500",icon:(0,a.jsx)(m.Z,{className:"ml-2"})}};function ec(e){return e.replaceAll("\\n","\n").replace(/]+)>/gi,"").replace(/]+)>/gi," ")}var 
ei=(0,s.memo)(function(e){let{children:t,content:l,isChartChat:r,onLinkClick:n}=e,{scene:o}=(0,s.useContext)(w.p),{context:c,model_name:i,role:d}=l,u="view"===d,{relations:m,value:f,cachePluginContext:N}=(0,s.useMemo)(()=>{if("string"!=typeof c)return{relations:[],value:"",cachePluginContext:[]};let[e,t]=c.split(" relations:"),l=t?t.split(","):[],a=[],s=0,r=e.replace(/]*>[^<]*<\/dbgpt-view>/gi,e=>{try{var t;let l=e.replaceAll("\n","\\n").replace(/<[^>]*>|<\/[^>]*>/gm,""),r=JSON.parse(l),n="".concat(s," ");return a.push({...r,result:ec(null!==(t=r.result)&&void 0!==t?t:"")}),s++,n}catch(t){return console.log(t.message,t),e}});return{relations:l,cachePluginContext:a,value:r}},[c]),_=(0,s.useMemo)(()=>({"custom-view"(e){var t;let{children:l}=e,s=+l.toString();if(!N[s])return l;let{name:r,status:n,err_msg:o,result:c}=N[s],{bgClass:i,icon:d}=null!==(t=eo[n])&&void 0!==t?t:{};return(0,a.jsxs)("div",{className:"bg-white dark:bg-[#212121] rounded-lg overflow-hidden my-2 flex flex-col lg:max-w-[80%]",children:[(0,a.jsxs)("div",{className:j()("flex px-4 md:px-6 py-2 items-center text-white text-sm",i),children:[r,d]}),c?(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm",children:(0,a.jsx)(g.D,{components:en,rehypePlugins:[v.Z],children:null!=c?c:""})}):(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm",children:o})]})}}),[c,N]);return u||c?(0,a.jsxs)("div",{className:j()("relative flex flex-wrap w-full p-2 md:p-4 rounded-xl break-words",{"bg-white dark:bg-[#232734]":u,"lg:w-full xl:w-full pl-0":["chat_with_db_execute","chat_dashboard"].includes(o)}),children:[(0,a.jsx)("div",{className:"mr-2 flex flex-shrink-0 items-center justify-center h-7 w-7 rounded-full text-lg sm:mr-4",children:u?(0,y.A)(i)||(0,a.jsx)(x.Z,{}):(0,a.jsx)(h.Z,{})}),(0,a.jsxs)("div",{className:"flex-1 overflow-hidden items-center text-md leading-8 pb-2",children:[!u&&"string"==typeof c&&c,u&&r&&"object"==typeof c&&(0,a.jsxs)("div",{children:["[".concat(c.template_name,"]: 
"),(0,a.jsxs)("span",{className:"text-theme-primary cursor-pointer",onClick:n,children:[(0,a.jsx)(p.Z,{className:"mr-1"}),c.template_introduce||"More Details"]})]}),u&&"string"==typeof c&&(0,a.jsx)(g.D,{components:{...en,..._},rehypePlugins:[v.Z],children:ec(f)}),!!(null==m?void 0:m.length)&&(0,a.jsx)("div",{className:"flex flex-wrap mt-2",children:null==m?void 0:m.map((e,t)=>(0,a.jsx)(b.Z,{color:"#108ee9",children:e},e+t))})]}),t]}):(0,a.jsx)("div",{className:"h-12"})}),ed=l(59301),eu=l(41132),em=l(74312),ex=l(3414),eh=l(72868),ep=l(59562),eg=l(14553),ev=l(25359),ef=l(7203),ej=l(48665),eb=l(26047),ey=l(99056),ew=l(57814),eN=l(63955),e_=l(33028),eZ=l(40911),ek=l(66478),eC=l(83062),eS=l(89182),eP=e=>{var t;let{conv_index:l,question:r,knowledge_space:n,select_param:o}=e,{t:c}=(0,et.$G)(),{chatId:i}=(0,s.useContext)(w.p),[d,u]=(0,s.useState)(""),[m,x]=(0,s.useState)(4),[h,p]=(0,s.useState)(""),g=(0,s.useRef)(null),[v,f]=D.ZP.useMessage(),j=(0,s.useCallback)((e,t)=>{t?(0,eS.Vx)((0,eS.Eb)(i,l)).then(e=>{var t,l,a,s;let r=null!==(t=e[1])&&void 0!==t?t:{};u(null!==(l=r.ques_type)&&void 0!==l?l:""),x(parseInt(null!==(a=r.score)&&void 0!==a?a:"4")),p(null!==(s=r.messages)&&void 0!==s?s:"")}).catch(e=>{console.log(e)}):(u(""),x(4),p(""))},[i,l]),b=(0,em.Z)(ex.Z)(e=>{let{theme:t}=e;return{backgroundColor:"dark"===t.palette.mode?"#FBFCFD":"#0E0E10",...t.typography["body-sm"],padding:t.spacing(1),display:"flex",alignItems:"center",justifyContent:"center",borderRadius:4,width:"100%",height:"100%"}});return(0,a.jsxs)(eh.L,{onOpenChange:j,children:[f,(0,a.jsx)(eC.Z,{title:c("Rating"),children:(0,a.jsx)(ep.Z,{slots:{root:eg.ZP},slotProps:{root:{variant:"plain",color:"primary"}},sx:{borderRadius:40},children:(0,a.jsx)(ed.Z,{})})}),(0,a.jsxs)(ev.Z,{children:[(0,a.jsx)(ef.Z,{disabled:!0,sx:{minHeight:0}}),(0,a.jsx)(ej.Z,{sx:{width:"100%",maxWidth:350,display:"grid",gap:3,padding:1},children:(0,a.jsx)("form",{onSubmit:e=>{e.preventDefault();let 
t={conv_uid:i,conv_index:l,question:r,knowledge_space:n,score:m,ques_type:d,messages:h};console.log(t),(0,eS.Vx)((0,eS.VC)({data:t})).then(e=>{v.open({type:"success",content:"save success"})}).catch(e=>{v.open({type:"error",content:"save error"})})},children:(0,a.jsxs)(eb.Z,{container:!0,spacing:.5,columns:13,sx:{flexGrow:1},children:[(0,a.jsx)(eb.Z,{xs:3,children:(0,a.jsx)(b,{children:c("Q_A_Category")})}),(0,a.jsx)(eb.Z,{xs:10,children:(0,a.jsx)(ey.Z,{action:g,value:d,placeholder:"Choose one…",onChange:(e,t)=>u(null!=t?t:""),...d&&{endDecorator:(0,a.jsx)(eg.ZP,{size:"sm",variant:"plain",color:"neutral",onMouseDown:e=>{e.stopPropagation()},onClick:()=>{var e;u(""),null===(e=g.current)||void 0===e||e.focusVisible()},children:(0,a.jsx)(eu.Z,{})}),indicator:null},sx:{width:"100%"},children:o&&(null===(t=Object.keys(o))||void 0===t?void 0:t.map(e=>(0,a.jsx)(ew.Z,{value:e,children:o[e]},e)))})}),(0,a.jsx)(eb.Z,{xs:3,children:(0,a.jsx)(b,{children:(0,a.jsx)(eC.Z,{title:(0,a.jsx)(ej.Z,{children:(0,a.jsx)("div",{children:c("feed_back_desc")})}),variant:"solid",placement:"left",children:c("Q_A_Rating")})})}),(0,a.jsx)(eb.Z,{xs:10,sx:{pl:0,ml:0},children:(0,a.jsx)(eN.Z,{"aria-label":"Custom",step:1,min:0,max:5,valueLabelFormat:function(e){return({0:c("Lowest"),1:c("Missed"),2:c("Lost"),3:c("Incorrect"),4:c("Verbose"),5:c("Best")})[e]},valueLabelDisplay:"on",marks:[{value:0,label:"0"},{value:1,label:"1"},{value:2,label:"2"},{value:3,label:"3"},{value:4,label:"4"},{value:5,label:"5"}],sx:{width:"90%",pt:3,m:2,ml:1},onChange:e=>{var t;return x(null===(t=e.target)||void 0===t?void 
0:t.value)},value:m})}),(0,a.jsx)(eb.Z,{xs:13,children:(0,a.jsx)(e_.Z,{placeholder:c("Please_input_the_text"),value:h,onChange:e=>p(e.target.value),minRows:2,maxRows:4,endDecorator:(0,a.jsx)(eZ.ZP,{level:"body-xs",sx:{ml:"auto"},children:c("input_count")+h.length+c("input_unit")}),sx:{width:"100%",fontSize:14}})}),(0,a.jsx)(eb.Z,{xs:13,children:(0,a.jsx)(ek.Z,{type:"submit",variant:"outlined",sx:{width:"100%",height:"100%"},children:c("submit")})})]})})})]})]})},eE=l(36147),eR=l(96486),eD=l(19409),eO=l(87740),eI=l(80573),eM=(0,s.memo)(function(e){let{content:t}=e,{scene:l}=(0,s.useContext)(w.p),r="view"===t.role;return(0,a.jsx)("div",{className:j()("relative w-full p-2 md:p-4 rounded-xl break-words",{"bg-white dark:bg-[#232734]":r,"lg:w-full xl:w-full pl-0":["chat_with_db_execute","chat_dashboard"].includes(l)}),children:r?(0,a.jsx)(g.D,{components:en,rehypePlugins:[v.Z],children:t.context.replace(/]+)>/gi,"").replace(/]+)>/gi," ")}):(0,a.jsx)("div",{className:"",children:t.context})})}),eq=l(91085),eA=e=>{var t,l;let{messages:n,onSubmit:i}=e,{dbParam:d,currentDialogue:u,scene:m,model:x,refreshDialogList:h,chatId:p,agent:g,docId:v}=(0,s.useContext)(w.p),{t:f}=(0,et.$G)(),b=(0,o.useSearchParams)(),N=null!==(t=b&&b.get("select_param"))&&void 0!==t?t:"",_=null!==(l=b&&b.get("spaceNameOriginal"))&&void 0!==l?l:"",[Z,k]=(0,s.useState)(!1),[C,S]=(0,s.useState)(!1),[P,E]=(0,s.useState)(n),[R,I]=(0,s.useState)(""),[M,q]=(0,s.useState)(),A=(0,s.useRef)(null),L=(0,s.useMemo)(()=>"chat_dashboard"===m,[m]),F=(0,eI.Z)(),z=(0,s.useMemo)(()=>{switch(m){case"chat_agent":return g;case"chat_excel":return null==u?void 0:u.select_param;case"chat_flow":return N;default:return _||d}},[m,g,u,d,_,N]),T=async e=>{if(!Z&&e.trim()){if("chat_agent"===m&&!g){D.ZP.warning(f("choice_agent_tip"));return}try{k(!0),await i(e,{select_param:null!=z?z:""})}finally{k(!1)}}},G=e=>{try{return JSON.parse(e)}catch(t){return e}},[$,H]=D.ZP.useMessage(),U=async e=>{let t=null==e?void 
0:e.replace(/\trelations:.*/g,""),l=J()(t);l?t?$.open({type:"success",content:f("Copy_success")}):$.open({type:"warning",content:f("Copy_nothing")}):$.open({type:"error",content:f("Copry_error")})},B=async()=>{!Z&&v&&(k(!0),await F(v),k(!1))};return(0,r.Z)(async()=>{let e=(0,V.a_)();e&&e.id===p&&(await T(e.message),h(),localStorage.removeItem(V.rU))},[p]),(0,s.useEffect)(()=>{let e=n;L&&(e=(0,eR.cloneDeep)(n).map(e=>((null==e?void 0:e.role)==="view"&&"string"==typeof(null==e?void 0:e.context)&&(e.context=G(null==e?void 0:e.context)),e))),E(e.filter(e=>["view","human"].includes(e.role)))},[L,n]),(0,s.useEffect)(()=>{(0,eS.Vx)((0,eS.Lu)()).then(e=>{var t;q(null!==(t=e[1])&&void 0!==t?t:{})}).catch(e=>{console.log(e)})},[]),(0,s.useEffect)(()=>{setTimeout(()=>{var e;null===(e=A.current)||void 0===e||e.scrollTo(0,A.current.scrollHeight)},50)},[n]),(0,a.jsxs)(a.Fragment,{children:[H,(0,a.jsx)("div",{ref:A,className:"flex flex-1 overflow-y-auto pb-8 w-full flex-col",children:(0,a.jsx)("div",{className:"flex items-center flex-1 flex-col text-sm leading-6 text-slate-900 dark:text-slate-300 sm:text-base sm:leading-7",children:P.length?P.map((e,t)=>{var l;return"chat_agent"===m?(0,a.jsx)(eM,{content:e},t):(0,a.jsx)(ei,{content:e,isChartChat:L,onLinkClick:()=>{S(!0),I(JSON.stringify(null==e?void 0:e.context,null,2))},children:"view"===e.role&&(0,a.jsxs)("div",{className:"flex w-full border-t border-gray-200 dark:border-theme-dark",children:["chat_knowledge"===m&&e.retry?(0,a.jsxs)(ek.Z,{onClick:B,slots:{root:eg.ZP},slotProps:{root:{variant:"plain",color:"primary"}},children:[(0,a.jsx)(eO.Z,{}),"\xa0",(0,a.jsx)("span",{className:"text-sm",children:f("Retry")})]}):null,(0,a.jsxs)("div",{className:"flex w-full flex-row-reverse",children:[(0,a.jsx)(eP,{select_param:M,conv_index:Math.ceil((t+1)/2),question:null===(l=null==P?void 0:P.filter(t=>(null==t?void 0:t.role)==="human"&&(null==t?void 0:t.order)===e.order)[0])||void 0===l?void 
0:l.context,knowledge_space:_||d||""}),(0,a.jsx)(eC.Z,{title:f("Copy"),children:(0,a.jsx)(ek.Z,{onClick:()=>U(null==e?void 0:e.context),slots:{root:eg.ZP},slotProps:{root:{variant:"plain",color:"primary"}},sx:{borderRadius:40},children:(0,a.jsx)(O.Z,{})})})]})]})},t)}):(0,a.jsx)(eq.Z,{description:"Start a conversation"})})}),(0,a.jsx)("div",{className:j()("relative after:absolute after:-top-8 after:h-8 after:w-full after:bg-gradient-to-t after:from-theme-light after:to-transparent dark:after:from-theme-dark",{"cursor-not-allowed":"chat_excel"===m&&!(null==u?void 0:u.select_param)}),children:(0,a.jsxs)("div",{className:"flex flex-wrap w-full py-2 sm:pt-6 sm:pb-10 items-center",children:[x&&(0,a.jsx)("div",{className:"mr-2 flex",children:(0,y.A)(x)}),(0,a.jsx)(eD.Z,{loading:Z,onSubmit:T,handleFinish:k})]})}),(0,a.jsx)(eE.default,{title:"JSON Editor",open:C,width:"60%",cancelButtonProps:{hidden:!0},onOk:()=>{S(!1)},onCancel:()=>{S(!1)},children:(0,a.jsx)(c.Z,{className:"w-full h-[500px]",language:"json",value:R})})]})},eJ=l(67772),eL=l(45247),eF=()=>{var e;let t=(0,o.useSearchParams)(),{scene:l,chatId:c,model:i,agent:d,setModel:u,history:m,setHistory:x}=(0,s.useContext)(w.p),h=(0,n.Z)({}),p=null!==(e=t&&t.get("initMessage"))&&void 0!==e?e:"",[g,v]=(0,s.useState)(!1),[f,b]=(0,s.useState)(),y=async()=>{v(!0);let[,e]=await (0,eS.Vx)((0,eS.$i)(c));x(null!=e?e:[]),v(!1)},N=e=>{var t;let l=null===(t=e[e.length-1])||void 0===t?void 0:t.context;if(l)try{let e="string"==typeof l?JSON.parse(l):l;b((null==e?void 0:e.template_name)==="report"?null==e?void 0:e.charts:void 0)}catch(e){b(void 0)}};(0,r.Z)(async()=>{let e=(0,V.a_)();e&&e.id===c||await y()},[p,c]),(0,s.useEffect)(()=>{var e,t;if(!m.length)return;let l=null===(e=null===(t=m.filter(e=>"view"===e.role))||void 0===t?void 0:t.slice(-1))||void 0===e?void 0:e[0];(null==l?void 0:l.model_name)&&u(l.model_name),N(m)},[m.length]),(0,s.useEffect)(()=>()=>{x([])},[]);let _=(0,s.useCallback)((e,t)=>new Promise(a=>{let 
s=[...m,{role:"human",context:e,model_name:i,order:0,time_stamp:0},{role:"view",context:"",model_name:i,order:0,time_stamp:0}],r=s.length-1;x([...s]),h({data:{...t,chat_mode:l||"chat_normal",model_name:i,user_input:e},chatId:c,onMessage:e=>{(null==t?void 0:t.incremental)?s[r].context+=e:s[r].context=e,x([...s])},onDone:()=>{N(s),a()},onClose:()=>{N(s),a()},onError:e=>{s[r].context=e,x([...s]),a()}})}),[m,h,c,i,d,l]);return(0,a.jsxs)(a.Fragment,{children:[(0,a.jsx)(eL.Z,{visible:g}),(0,a.jsx)(eJ.Z,{refreshHistory:y,modelChange:e=>{u(e)}}),(0,a.jsxs)("div",{className:"px-4 flex flex-1 flex-wrap overflow-hidden relative",children:[!!(null==f?void 0:f.length)&&(0,a.jsx)("div",{className:"w-full pb-4 xl:w-3/4 h-1/2 xl:pr-4 xl:h-full overflow-y-auto",children:(0,a.jsx)(E.ZP,{chartsData:f})}),!(null==f?void 0:f.length)&&"chat_dashboard"===l&&(0,a.jsx)(eq.Z,{className:"w-full xl:w-3/4 h-1/2 xl:h-full"}),(0,a.jsx)("div",{className:j()("flex flex-1 flex-col overflow-hidden",{"px-0 xl:pl-4 h-1/2 w-full xl:w-auto xl:h-full border-t xl:border-t-0 xl:border-l dark:border-gray-800":"chat_dashboard"===l,"h-full lg:px-8":"chat_dashboard"!==l}),children:(0,a.jsx)(eA,{messages:m,onSubmit:_})})]})]})}},19409:function(e,t,l){l.d(t,{Z:function(){return D}});var a=l(85893),s=l(27496),r=l(79531),n=l(71577),o=l(67294),c=l(2487),i=l(83062),d=l(2453),u=l(46735),m=l(55241),x=l(39479),h=l(51009),p=l(58299),g=l(56155),v=l(30119),f=l(67421);let j=e=>{let{data:t,loading:l,submit:s,close:r}=e,{t:n}=(0,f.$G)(),o=e=>()=>{s(e),r()};return(0,a.jsx)("div",{style:{maxHeight:400,overflow:"auto"},children:(0,a.jsx)(c.Z,{dataSource:null==t?void 0:t.data,loading:l,rowKey:e=>e.prompt_name,renderItem:e=>(0,a.jsx)(c.Z.Item,{onClick:o(e.content),children:(0,a.jsx)(i.Z,{title:e.content,children:(0,a.jsx)(c.Z.Item.Meta,{style:{cursor:"copy"},title:e.prompt_name,description:n("Prompt_Info_Scene")+":".concat(e.chat_scene,",")+n("Prompt_Info_Sub_Scene")+":".concat(e.sub_chat_scene)})})},e.prompt_name)})})};var 
b=e=>{let{submit:t}=e,{t:l}=(0,f.$G)(),[s,r]=(0,o.useState)(!1),[n,c]=(0,o.useState)("common"),{data:b,loading:y}=(0,g.Z)(()=>(0,v.PR)("/prompt/list",{prompt_type:n}),{refreshDeps:[n],onError:e=>{d.ZP.error(null==e?void 0:e.message)}});return(0,a.jsx)(u.ZP,{theme:{components:{Popover:{minWidth:250}}},children:(0,a.jsx)(m.Z,{title:(0,a.jsx)(x.Z.Item,{label:"Prompt "+l("Type"),children:(0,a.jsx)(h.default,{style:{width:150},value:n,onChange:e=>{c(e)},options:[{label:l("Public")+" Prompts",value:"common"},{label:l("Private")+" Prompts",value:"private"}]})}),content:(0,a.jsx)(j,{data:b,loading:y,submit:t,close:()=>{r(!1)}}),placement:"topRight",trigger:"click",open:s,onOpenChange:e=>{r(e)},children:(0,a.jsx)(i.Z,{title:l("Click_Select")+" Prompt",children:(0,a.jsx)(p.Z,{className:"bottom-[30%]"})})})})},y=l(41468),w=l(89182),N=l(80573),_=l(5392),Z=l(84553);function k(e){let{dbParam:t,setDocId:l}=(0,o.useContext)(y.p),{onUploadFinish:s,handleFinish:r}=e,c=(0,N.Z)(),[i,d]=(0,o.useState)(!1),u=async e=>{d(!0);let a=new FormData;a.append("doc_name",e.file.name),a.append("doc_file",e.file),a.append("doc_type","DOCUMENT");let n=await (0,w.Vx)((0,w.iG)(t||"default",a));if(!n[1]){d(!1);return}l(n[1]),s(),d(!1),null==r||r(!0),await c(n[1]),null==r||r(!1)};return(0,a.jsx)(Z.default,{customRequest:u,showUploadList:!1,maxCount:1,multiple:!1,className:"absolute z-10 top-2 left-2",accept:".pdf,.ppt,.pptx,.xls,.xlsx,.doc,.docx,.txt,.md",children:(0,a.jsx)(n.ZP,{loading:i,size:"small",shape:"circle",icon:(0,a.jsx)(_.Z,{})})})}var C=l(11163),S=l(82353),P=l(1051);function E(e){let{document:t}=e;switch(t.status){case"RUNNING":return(0,a.jsx)(S.Rp,{});case"FINISHED":default:return(0,a.jsx)(S.s2,{});case"FAILED":return(0,a.jsx)(P.Z,{})}}function R(e){let{documents:t,dbParam:l}=e,s=(0,C.useRouter)(),r=e=>{s.push("/knowledge/chunk/?spaceName=".concat(l,"&id=").concat(e))};return(null==t?void 0:t.length)?(0,a.jsx)("div",{className:"absolute flex overflow-scroll h-12 top-[-35px] w-full 
z-10",children:t.map(e=>{let t;switch(e.status){case"RUNNING":t="#2db7f5";break;case"FINISHED":default:t="#87d068";break;case"FAILED":t="#f50"}return(0,a.jsx)(i.Z,{title:e.result,children:(0,a.jsxs)(n.ZP,{style:{color:t},onClick:()=>{r(e.id)},className:"shrink flex items-center mr-3",children:[(0,a.jsx)(E,{document:e}),e.doc_name]})},e.id)})}):null}var D=function(e){let{children:t,loading:l,onSubmit:c,handleFinish:i,...d}=e,{dbParam:u,scene:m}=(0,o.useContext)(y.p),[x,h]=(0,o.useState)(""),p=(0,o.useMemo)(()=>"chat_knowledge"===m,[m]),[g,v]=(0,o.useState)([]),f=(0,o.useRef)(0);async function j(){if(!u)return null;let[e,t]=await (0,w.Vx)((0,w._Q)(u,{page:1,page_size:f.current}));v(null==t?void 0:t.data)}(0,o.useEffect)(()=>{p&&j()},[u]);let N=async()=>{f.current+=1,await j()};return(0,a.jsxs)("div",{className:"flex-1 relative",children:[(0,a.jsx)(R,{documents:g,dbParam:u}),p&&(0,a.jsx)(k,{handleFinish:i,onUploadFinish:N,className:"absolute z-10 top-2 left-2"}),(0,a.jsx)(r.default.TextArea,{className:"flex-1 ".concat(p?"pl-10":""," pr-10"),size:"large",value:x,autoSize:{minRows:1,maxRows:4},...d,onPressEnter:e=>{if(x.trim()&&13===e.keyCode){if(e.shiftKey){h(e=>e+"\n");return}c(x),setTimeout(()=>{h("")},0)}},onChange:e=>{if("number"==typeof d.maxLength){h(e.target.value.substring(0,d.maxLength));return}h(e.target.value)}}),(0,a.jsx)(n.ZP,{className:"ml-2 flex items-center justify-center absolute right-0 bottom-0",size:"large",type:"text",loading:l,icon:(0,a.jsx)(s.Z,{}),onClick:()=>{c(x)}}),(0,a.jsx)(b,{submit:e=>{h(x+e)}}),t]})}},45247:function(e,t,l){var a=l(85893),s=l(50888);t.Z=function(e){let{visible:t}=e;return t?(0,a.jsx)("div",{className:"absolute w-full h-full top-0 left-0 flex justify-center items-center z-10 bg-white dark:bg-black bg-opacity-50 dark:bg-opacity-50 backdrop-blur-sm text-3xl animate-fade animate-duration-200",children:(0,a.jsx)(s.Z,{})}):null}},43446:function(e,t,l){var 
a=l(1375),s=l(2453),r=l(67294),n=l(36353),o=l(41468),c=l(83454);t.Z=e=>{let{queryAgentURL:t="/api/v1/chat/completions"}=e,l=(0,r.useMemo)(()=>new AbortController,[]),{scene:i}=(0,r.useContext)(o.p),d=(0,r.useCallback)(async e=>{let{data:r,chatId:o,onMessage:d,onClose:u,onDone:m,onError:x}=e;if(!(null==r?void 0:r.user_input)&&!(null==r?void 0:r.doc_id)){s.ZP.warning(n.Z.t("no_context_tip"));return}let h={...r,conv_uid:o};if(!h.conv_uid){s.ZP.error("conv_uid 不存在,请刷新后重试");return}try{var p;await (0,a.L)("".concat(null!==(p=c.env.API_BASE_URL)&&void 0!==p?p:"").concat(t),{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(h),signal:l.signal,openWhenHidden:!0,async onopen(e){if(e.ok&&e.headers.get("content-type")===a.a)return},onclose(){l.abort(),null==u||u()},onerror(e){throw Error(e)},onmessage:e=>{let t=e.data;try{t="chat_agent"===i?JSON.parse(t).vis:JSON.parse(t)}catch(e){t.replaceAll("\\n","\n")}"string"==typeof t?"[DONE]"===t?null==m||m():(null==t?void 0:t.startsWith("[ERROR]"))?null==x||x(null==t?void 0:t.replace("[ERROR]","")):null==d||d(t):(null==d||d(t),null==m||m())}})}catch(e){l.abort(),null==x||x("Sorry, We meet some error, please try agin later.",e)}},[t]);return(0,r.useEffect)(()=>()=>{l.abort()},[]),d}},80573:function(e,t,l){var a=l(41468),s=l(67294),r=l(43446),n=l(89182);t.Z=()=>{let{history:e,setHistory:t,chatId:l,model:o,docId:c}=(0,s.useContext)(a.p),i=(0,r.Z)({queryAgentURL:"/knowledge/document/summary"}),d=(0,s.useCallback)(async e=>{let[,a]=await (0,n.Vx)((0,n.$i)(l)),s=[...a,{role:"human",context:"",model_name:o,order:0,time_stamp:0},{role:"view",context:"",model_name:o,order:0,time_stamp:0,retry:!0}],r=s.length-1;t([...s]),await i({data:{doc_id:e||c,model_name:o},chatId:l,onMessage:e=>{s[r].context=e,t([...s])}})},[e,o,c,l]);return d}}}]);
\ No newline at end of file
+"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[4134],{12545:function(e,t,l){l.r(t),l.d(t,{default:function(){return eF}});var a=l(85893),s=l(67294),r=l(2093),n=l(43446),o=l(39332),c=l(74434),i=l(24019),d=l(50888),u=l(97937),m=l(63606),x=l(50228),h=l(87547),p=l(89035),g=l(92975),v=l(12767),f=l(94184),j=l.n(f),b=l(66309),y=l(81799),w=l(41468),N=l(29158),_=l(98165),Z=l(14079),k=l(38426),C=l(45396),S=l(44442),P=l(55241),D=l(39156),E=l(71577),R=l(2453),O=l(57132),I=l(36096),M=l(79166),q=l(93179),A=l(20640),J=l.n(A);function L(e){let{code:t,light:l,dark:r,language:n,customStyle:o}=e,{mode:c}=(0,s.useContext)(w.p);return(0,a.jsxs)("div",{className:"relative",children:[(0,a.jsx)(E.ZP,{className:"absolute right-3 top-2 text-gray-300 hover:!text-gray-200 bg-gray-700",type:"text",icon:(0,a.jsx)(O.Z,{}),onClick:()=>{let e=J()(t);R.ZP[e?"success":"error"](e?"Copy success":"Copy failed")}}),(0,a.jsx)(q.Z,{customStyle:o,language:n,style:"dark"===c?null!=r?r:I.Z:null!=l?l:M.Z,children:t})]})}var F=l(14313),z=l(47221),T=function(e){let{data:t}=e;return t&&t.length?(0,a.jsx)(z.Z,{bordered:!0,className:"my-3",expandIcon:e=>{let{isActive:t}=e;return(0,a.jsx)(F.Z,{rotate:t?90:0})},items:t.map((e,t)=>({key:t,label:(0,a.jsxs)("div",{className:"whitespace-normal",children:[(0,a.jsxs)("span",{children:[e.name," - ",e.agent]}),"complete"===e.status?(0,a.jsx)(m.Z,{className:"!text-green-500 ml-2"}):(0,a.jsx)(i.Z,{className:"!text-gray-500 ml-2"})]}),children:(0,a.jsx)(g.D,{components:en,children:e.markdown})}))}):null},G=l(32198),$=function(e){let{data:t}=e;return t&&t.length?(0,a.jsx)(a.Fragment,{children:t.map((e,t)=>(0,a.jsxs)("div",{className:"rounded my-4 md:my-6",children:[(0,a.jsxs)("div",{className:"flex items-center mb-3 text-sm",children:[e.model?(0,y.A)(e.model):(0,a.jsx)("div",{className:"rounded-full w-6 h-6 bg-gray-100"}),(0,a.jsxs)("div",{className:"ml-2 opacity-70",children:[e.sender,(0,a.jsx)(G.Z,{className:"mx-2 
text-base"}),e.receiver]})]}),(0,a.jsx)("div",{className:"whitespace-normal text-sm",children:(0,a.jsx)(g.D,{components:en,children:e.markdown})})]},t))}):null},V=l(62418),H=function(e){let{data:t}=e;return(0,a.jsxs)("div",{className:"rounded overflow-hidden",children:[(0,a.jsx)("div",{className:"p-3 text-white bg-red-500 whitespace-normal",children:t.display_type}),(0,a.jsxs)("div",{className:"p-3 bg-red-50",children:[(0,a.jsx)("div",{className:"mb-2 whitespace-normal",children:t.thought}),(0,a.jsx)(L,{code:(0,V._m)(t.sql),language:"sql"})]})]})},U=l(8497),B=function(e){var t;let{data:l,type:s,sql:r}=e,n=(null==l?void 0:l[0])?null===(t=Object.keys(null==l?void 0:l[0]))||void 0===t?void 0:t.map(e=>({title:e,dataIndex:e,key:e})):[],o={key:"chart",label:"Chart",children:(0,a.jsx)(U._,{data:l,chartType:(0,U.a)(s)})},c={key:"sql",label:"SQL",children:(0,a.jsx)(L,{language:"sql",code:(0,V._m)(r)})},i={key:"data",label:"Data",children:(0,a.jsx)(C.Z,{dataSource:l,columns:n,scroll:{x:"auto"}})},d="response_table"===s?[i,c]:[o,c,i];return(0,a.jsx)(S.Z,{defaultActiveKey:"response_table"===s?"data":"chart",items:d,size:"small"})},Q=function(e){let{data:t}=e;return(0,a.jsx)(B,{data:t.data,type:t.type,sql:t.sql})};let W=[[2],[1,2],[1,3],[2,1,2],[2,1,3],[3,1,3],[3,2,3]];var K=function(e){let{data:t}=e,l=(0,s.useMemo)(()=>{if(t.chart_count>1){let e=W[t.chart_count-2],l=0;return e.map(e=>{let a=t.data.slice(l,l+e);return l=e,a})}return[t.data]},[t.data,t.chart_count]);return(0,a.jsx)("div",{className:"flex flex-col gap-3",children:l.map((e,t)=>(0,a.jsx)("div",{className:"flex gap-3",children:e.map((e,t)=>(0,a.jsxs)("div",{className:"flex flex-1 flex-col justify-between p-4 rounded border border-gray-200 dark:border-gray-500 whitespace-normal",children:[(0,a.jsxs)("div",{children:[e.title&&(0,a.jsx)("div",{className:"mb-2 text-lg",children:e.title}),e.describe&&(0,a.jsx)("div",{className:"mb-4 text-sm 
text-gray-500",children:e.describe})]}),(0,a.jsx)(D._z,{data:e.data,chartType:(0,D.aG)(e.type)})]},"chart-".concat(t)))},"row-".concat(t)))})};let X={todo:{bgClass:"bg-gray-500",icon:(0,a.jsx)(i.Z,{className:"ml-2"})},runing:{bgClass:"bg-blue-500",icon:(0,a.jsx)(d.Z,{className:"ml-2"})},failed:{bgClass:"bg-red-500",icon:(0,a.jsx)(u.Z,{className:"ml-2"})},complete:{bgClass:"bg-green-500",icon:(0,a.jsx)(m.Z,{className:"ml-2"})}};var Y=function(e){var t,l;let{data:s}=e,{bgClass:r,icon:n}=null!==(t=X[s.status])&&void 0!==t?t:{};return(0,a.jsxs)("div",{className:"bg-theme-light dark:bg-theme-dark-container rounded overflow-hidden my-2 flex flex-col lg:max-w-[80%]",children:[(0,a.jsxs)("div",{className:j()("flex px-4 md:px-6 py-2 items-center text-white text-sm",r),children:[s.name,n]}),s.result?(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm whitespace-normal",children:(0,a.jsx)(g.D,{components:en,rehypePlugins:[v.Z],children:null!==(l=s.result)&&void 0!==l?l:""})}):(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm",children:s.err_msg})]})},ee=l(76199),et=l(67421),el=l(24136),ea=function(e){let{data:t}=e,{t:l}=(0,et.$G)(),[r,n]=(0,s.useState)(0);return(0,a.jsxs)("div",{className:"bg-[#EAEAEB] rounded overflow-hidden border border-theme-primary dark:bg-theme-dark text-sm",children:[(0,a.jsxs)("div",{children:[(0,a.jsx)("div",{className:"flex",children:t.code.map((e,t)=>(0,a.jsxs)("div",{className:j()("px-4 py-2 text-[#121417] dark:text-white cursor-pointer",{"bg-white dark:bg-theme-dark-container":t===r}),onClick:()=>{n(t)},children:["CODE ",t+1,": ",e[0]]},t))}),t.code.length&&(0,a.jsx)(L,{language:t.code[r][0],code:t.code[r][1],customStyle:{maxHeight:300,margin:0},light:el.Z,dark:M.Z})]}),(0,a.jsxs)("div",{children:[(0,a.jsx)("div",{className:"flex",children:(0,a.jsxs)("div",{className:"bg-white dark:bg-theme-dark-container px-4 py-2 text-[#121417] dark:text-white",children:[l("Terminal")," 
",t.exit_success?(0,a.jsx)(m.Z,{className:"text-green-600"}):(0,a.jsx)(u.Z,{className:"text-red-600"})]})}),(0,a.jsx)("div",{className:"p-4 max-h-72 overflow-y-auto whitespace-normal bg-white dark:dark:bg-theme-dark",children:(0,a.jsx)(g.D,{components:en,remarkPlugins:[ee.Z],children:t.log})})]})]})};let es=["custom-view","chart-view","references","summary"],er={code(e){let{inline:t,node:l,className:s,children:r,style:n,...o}=e,c=String(r),{context:i,matchValues:d}=function(e){let t=es.reduce((t,l)=>{let a=RegExp("<".concat(l,"[^>]*/?>"),"gi");return e=e.replace(a,e=>(t.push(e),"")),t},[]);return{context:e,matchValues:t}}(c),u=(null==s?void 0:s.replace("language-",""))||"javascript";if("agent-plans"===u)try{let e=JSON.parse(c);return(0,a.jsx)(T,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("agent-messages"===u)try{let e=JSON.parse(c);return(0,a.jsx)($,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-convert-error"===u)try{let e=JSON.parse(c);return(0,a.jsx)(H,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-dashboard"===u)try{let e=JSON.parse(c);return(0,a.jsx)(K,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-chart"===u)try{let e=JSON.parse(c);return(0,a.jsx)(Q,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-plugin"===u)try{let e=JSON.parse(c);return(0,a.jsx)(Y,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}if("vis-code"===u)try{let e=JSON.parse(c);return(0,a.jsx)(ea,{data:e})}catch(e){return(0,a.jsx)(L,{language:u,code:c})}return(0,a.jsxs)(a.Fragment,{children:[t?(0,a.jsx)("code",{...o,style:n,className:"p-1 mx-1 rounded bg-theme-light dark:bg-theme-dark 
text-sm",children:r}):(0,a.jsx)(L,{code:i,language:u}),(0,a.jsx)(g.D,{components:er,rehypePlugins:[v.Z],children:d.join("\n")})]})},ul(e){let{children:t}=e;return(0,a.jsx)("ul",{className:"py-1",children:t})},ol(e){let{children:t}=e;return(0,a.jsx)("ol",{className:"py-1",children:t})},li(e){let{children:t,ordered:l}=e;return(0,a.jsx)("li",{className:"text-sm leading-7 ml-5 pl-2 text-gray-600 dark:text-gray-300 ".concat(l?"list-decimal":"list-disc"),children:t})},table(e){let{children:t}=e;return(0,a.jsx)("table",{className:"my-2 rounded-tl-md rounded-tr-md max-w-full bg-white dark:bg-gray-800 text-sm rounded-lg overflow-hidden",children:t})},thead(e){let{children:t}=e;return(0,a.jsx)("thead",{className:"bg-[#fafafa] dark:bg-black font-semibold",children:t})},th(e){let{children:t}=e;return(0,a.jsx)("th",{className:"!text-left p-4",children:t})},td(e){let{children:t}=e;return(0,a.jsx)("td",{className:"p-4 border-t border-[#f0f0f0] dark:border-gray-700",children:t})},h1(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-2xl font-bold my-4 border-b border-slate-300 pb-4",children:t})},h2(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-xl font-bold my-3",children:t})},h3(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-lg font-semibold my-2",children:t})},h4(e){let{children:t}=e;return(0,a.jsx)("h3",{className:"text-base font-semibold my-1",children:t})},a(e){let{children:t,href:l}=e;return(0,a.jsxs)("div",{className:"inline-block text-blue-600 dark:text-blue-400",children:[(0,a.jsx)(N.Z,{className:"mr-1"}),(0,a.jsx)("a",{href:l,target:"_blank",children:t})]})},img(e){let{src:t,alt:l}=e;return(0,a.jsx)("div",{children:(0,a.jsx)(k.Z,{className:"min-h-[1rem] max-w-full max-h-full border rounded",src:t,alt:l,placeholder:(0,a.jsx)(b.Z,{icon:(0,a.jsx)(_.Z,{spin:!0}),color:"processing",children:"Image Loading..."}),fallback:"/images/fallback.png"})})},blockquote(e){let{children:t}=e;return(0,a.jsx)("blockquote",{className:"py-4 px-6 border-l-4 
border-blue-600 rounded bg-white my-2 text-gray-500 dark:bg-slate-800 dark:text-gray-200 dark:border-white shadow-sm",children:t})},"chart-view":function(e){var t,l,s;let r,{content:n,children:o}=e;try{r=JSON.parse(n)}catch(e){console.log(e,n),r={type:"response_table",sql:"",data:[]}}let c=(null==r?void 0:null===(t=r.data)||void 0===t?void 0:t[0])?null===(l=Object.keys(null==r?void 0:null===(s=r.data)||void 0===s?void 0:s[0]))||void 0===l?void 0:l.map(e=>({title:e,dataIndex:e,key:e})):[],i={key:"chart",label:"Chart",children:(0,a.jsx)(D._z,{data:null==r?void 0:r.data,chartType:(0,D.aG)(null==r?void 0:r.type)})},d={key:"sql",label:"SQL",children:(0,a.jsx)(L,{code:(0,V._m)(null==r?void 0:r.sql,"mysql"),language:"sql"})},u={key:"data",label:"Data",children:(0,a.jsx)(C.Z,{dataSource:null==r?void 0:r.data,columns:c})},m=(null==r?void 0:r.type)==="response_table"?[u,d]:[i,d,u];return(0,a.jsxs)("div",{children:[(0,a.jsx)(S.Z,{defaultActiveKey:(null==r?void 0:r.type)==="response_table"?"data":"chart",items:m,size:"small"}),o]})},references:function(e){let t,{title:l,references:s,children:r}=e;if(r)try{l=(t=JSON.parse(r)).title,s=t.references}catch(e){return console.log("parse references failed",e),(0,a.jsx)("p",{className:"text-sm text-red-500",children:"Render Reference Error!"})}else try{s=JSON.parse(s)}catch(e){return console.log("parse references failed",e),(0,a.jsx)("p",{className:"text-sm text-red-500",children:"Render Reference Error!"})}return!s||(null==s?void 0:s.length)<1?null:(0,a.jsxs)("div",{className:"border-t-[1px] border-gray-300 mt-3 py-2",children:[(0,a.jsxs)("p",{className:"text-sm text-gray-500 dark:text-gray-400 mb-2",children:[(0,a.jsx)(N.Z,{className:"mr-2"}),(0,a.jsx)("span",{className:"font-semibold",children:l})]}),s.map((e,t)=>{var l;return(0,a.jsxs)("div",{className:"text-sm font-normal block ml-2 h-6 leading-6 overflow-hidden",children:[(0,a.jsxs)("span",{className:"inline-block w-6",children:["[",t+1,"]"]}),(0,a.jsx)("span",{className:"mr-2 
lg:mr-4 text-blue-400",children:e.name}),null==e?void 0:null===(l=e.chunks)||void 0===l?void 0:l.map((t,l)=>(0,a.jsxs)("span",{children:["object"==typeof t?(0,a.jsx)(P.Z,{content:(0,a.jsxs)("div",{className:"max-w-4xl",children:[(0,a.jsx)("p",{className:"mt-2 font-bold mr-2 border-t border-gray-500 pt-2",children:"Content:"}),(0,a.jsx)("p",{children:(null==t?void 0:t.content)||"No Content"}),(0,a.jsx)("p",{className:"mt-2 font-bold mr-2 border-t border-gray-500 pt-2",children:"MetaData:"}),(0,a.jsx)("p",{children:(null==t?void 0:t.meta_info)||"No MetaData"}),(0,a.jsx)("p",{className:"mt-2 font-bold mr-2 border-t border-gray-500 pt-2",children:"Score:"}),(0,a.jsx)("p",{children:(null==t?void 0:t.recall_score)||""})]}),title:"Chunk Information",children:(0,a.jsx)("span",{className:"cursor-pointer text-blue-500 ml-2",children:null==t?void 0:t.id},"chunk_content_".concat(null==t?void 0:t.id))}):(0,a.jsx)("span",{className:"cursor-pointer text-blue-500 ml-2",children:t},"chunk_id_".concat(t)),l<(null==e?void 0:e.chunks.length)-1&&(0,a.jsx)("span",{children:","},"chunk_comma_".concat(l))]},"chunk_".concat(l)))]},"file_".concat(t))})]})},summary:function(e){let{children:t}=e;return(0,a.jsxs)("div",{children:[(0,a.jsxs)("p",{className:"mb-2",children:[(0,a.jsx)(Z.Z,{className:"mr-2"}),(0,a.jsx)("span",{className:"font-semibold",children:"Document Summary"})]}),(0,a.jsx)("div",{children:t})]})}};var en=er;let eo={todo:{bgClass:"bg-gray-500",icon:(0,a.jsx)(i.Z,{className:"ml-2"})},runing:{bgClass:"bg-blue-500",icon:(0,a.jsx)(d.Z,{className:"ml-2"})},failed:{bgClass:"bg-red-500",icon:(0,a.jsx)(u.Z,{className:"ml-2"})},completed:{bgClass:"bg-green-500",icon:(0,a.jsx)(m.Z,{className:"ml-2"})}};function ec(e){return e.replaceAll("\\n","\n").replace(/]+)>/gi,"").replace(/]+)>/gi," ")}var 
ei=(0,s.memo)(function(e){let{children:t,content:l,isChartChat:r,onLinkClick:n}=e,{scene:o}=(0,s.useContext)(w.p),{context:c,model_name:i,role:d}=l,u="view"===d,{relations:m,value:f,cachePluginContext:N}=(0,s.useMemo)(()=>{if("string"!=typeof c)return{relations:[],value:"",cachePluginContext:[]};let[e,t]=c.split(" relations:"),l=t?t.split(","):[],a=[],s=0,r=e.replace(/]*>[^<]*<\/dbgpt-view>/gi,e=>{try{var t;let l=e.replaceAll("\n","\\n").replace(/<[^>]*>|<\/[^>]*>/gm,""),r=JSON.parse(l),n="".concat(s," ");return a.push({...r,result:ec(null!==(t=r.result)&&void 0!==t?t:"")}),s++,n}catch(t){return console.log(t.message,t),e}});return{relations:l,cachePluginContext:a,value:r}},[c]),_=(0,s.useMemo)(()=>({"custom-view"(e){var t;let{children:l}=e,s=+l.toString();if(!N[s])return l;let{name:r,status:n,err_msg:o,result:c}=N[s],{bgClass:i,icon:d}=null!==(t=eo[n])&&void 0!==t?t:{};return(0,a.jsxs)("div",{className:"bg-white dark:bg-[#212121] rounded-lg overflow-hidden my-2 flex flex-col lg:max-w-[80%]",children:[(0,a.jsxs)("div",{className:j()("flex px-4 md:px-6 py-2 items-center text-white text-sm",i),children:[r,d]}),c?(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm",children:(0,a.jsx)(g.D,{components:en,rehypePlugins:[v.Z],children:null!=c?c:""})}):(0,a.jsx)("div",{className:"px-4 md:px-6 py-4 text-sm",children:o})]})}}),[c,N]);return u||c?(0,a.jsxs)("div",{className:j()("relative flex flex-wrap w-full p-2 md:p-4 rounded-xl break-words",{"bg-white dark:bg-[#232734]":u,"lg:w-full xl:w-full pl-0":["chat_with_db_execute","chat_dashboard"].includes(o)}),children:[(0,a.jsx)("div",{className:"mr-2 flex flex-shrink-0 items-center justify-center h-7 w-7 rounded-full text-lg sm:mr-4",children:u?(0,y.A)(i)||(0,a.jsx)(x.Z,{}):(0,a.jsx)(h.Z,{})}),(0,a.jsxs)("div",{className:"flex-1 overflow-hidden items-center text-md leading-8 pb-2",children:[!u&&"string"==typeof c&&c,u&&r&&"object"==typeof c&&(0,a.jsxs)("div",{children:["[".concat(c.template_name,"]: 
"),(0,a.jsxs)("span",{className:"text-theme-primary cursor-pointer",onClick:n,children:[(0,a.jsx)(p.Z,{className:"mr-1"}),c.template_introduce||"More Details"]})]}),u&&"string"==typeof c&&(0,a.jsx)(g.D,{components:{...en,..._},rehypePlugins:[v.Z],children:ec(f)}),!!(null==m?void 0:m.length)&&(0,a.jsx)("div",{className:"flex flex-wrap mt-2",children:null==m?void 0:m.map((e,t)=>(0,a.jsx)(b.Z,{color:"#108ee9",children:e},e+t))})]}),t]}):(0,a.jsx)("div",{className:"h-12"})}),ed=l(59301),eu=l(41132),em=l(74312),ex=l(3414),eh=l(72868),ep=l(59562),eg=l(14553),ev=l(25359),ef=l(7203),ej=l(48665),eb=l(26047),ey=l(99056),ew=l(57814),eN=l(63955),e_=l(33028),eZ=l(40911),ek=l(66478),eC=l(83062),eS=l(89182),eP=e=>{var t;let{conv_index:l,question:r,knowledge_space:n,select_param:o}=e,{t:c}=(0,et.$G)(),{chatId:i}=(0,s.useContext)(w.p),[d,u]=(0,s.useState)(""),[m,x]=(0,s.useState)(4),[h,p]=(0,s.useState)(""),g=(0,s.useRef)(null),[v,f]=R.ZP.useMessage(),j=(0,s.useCallback)((e,t)=>{t?(0,eS.Vx)((0,eS.Eb)(i,l)).then(e=>{var t,l,a,s;let r=null!==(t=e[1])&&void 0!==t?t:{};u(null!==(l=r.ques_type)&&void 0!==l?l:""),x(parseInt(null!==(a=r.score)&&void 0!==a?a:"4")),p(null!==(s=r.messages)&&void 0!==s?s:"")}).catch(e=>{console.log(e)}):(u(""),x(4),p(""))},[i,l]),b=(0,em.Z)(ex.Z)(e=>{let{theme:t}=e;return{backgroundColor:"dark"===t.palette.mode?"#FBFCFD":"#0E0E10",...t.typography["body-sm"],padding:t.spacing(1),display:"flex",alignItems:"center",justifyContent:"center",borderRadius:4,width:"100%",height:"100%"}});return(0,a.jsxs)(eh.L,{onOpenChange:j,children:[f,(0,a.jsx)(eC.Z,{title:c("Rating"),children:(0,a.jsx)(ep.Z,{slots:{root:eg.ZP},slotProps:{root:{variant:"plain",color:"primary"}},sx:{borderRadius:40},children:(0,a.jsx)(ed.Z,{})})}),(0,a.jsxs)(ev.Z,{children:[(0,a.jsx)(ef.Z,{disabled:!0,sx:{minHeight:0}}),(0,a.jsx)(ej.Z,{sx:{width:"100%",maxWidth:350,display:"grid",gap:3,padding:1},children:(0,a.jsx)("form",{onSubmit:e=>{e.preventDefault();let 
t={conv_uid:i,conv_index:l,question:r,knowledge_space:n,score:m,ques_type:d,messages:h};console.log(t),(0,eS.Vx)((0,eS.VC)({data:t})).then(e=>{v.open({type:"success",content:"save success"})}).catch(e=>{v.open({type:"error",content:"save error"})})},children:(0,a.jsxs)(eb.Z,{container:!0,spacing:.5,columns:13,sx:{flexGrow:1},children:[(0,a.jsx)(eb.Z,{xs:3,children:(0,a.jsx)(b,{children:c("Q_A_Category")})}),(0,a.jsx)(eb.Z,{xs:10,children:(0,a.jsx)(ey.Z,{action:g,value:d,placeholder:"Choose one…",onChange:(e,t)=>u(null!=t?t:""),...d&&{endDecorator:(0,a.jsx)(eg.ZP,{size:"sm",variant:"plain",color:"neutral",onMouseDown:e=>{e.stopPropagation()},onClick:()=>{var e;u(""),null===(e=g.current)||void 0===e||e.focusVisible()},children:(0,a.jsx)(eu.Z,{})}),indicator:null},sx:{width:"100%"},children:o&&(null===(t=Object.keys(o))||void 0===t?void 0:t.map(e=>(0,a.jsx)(ew.Z,{value:e,children:o[e]},e)))})}),(0,a.jsx)(eb.Z,{xs:3,children:(0,a.jsx)(b,{children:(0,a.jsx)(eC.Z,{title:(0,a.jsx)(ej.Z,{children:(0,a.jsx)("div",{children:c("feed_back_desc")})}),variant:"solid",placement:"left",children:c("Q_A_Rating")})})}),(0,a.jsx)(eb.Z,{xs:10,sx:{pl:0,ml:0},children:(0,a.jsx)(eN.Z,{"aria-label":"Custom",step:1,min:0,max:5,valueLabelFormat:function(e){return({0:c("Lowest"),1:c("Missed"),2:c("Lost"),3:c("Incorrect"),4:c("Verbose"),5:c("Best")})[e]},valueLabelDisplay:"on",marks:[{value:0,label:"0"},{value:1,label:"1"},{value:2,label:"2"},{value:3,label:"3"},{value:4,label:"4"},{value:5,label:"5"}],sx:{width:"90%",pt:3,m:2,ml:1},onChange:e=>{var t;return x(null===(t=e.target)||void 0===t?void 
0:t.value)},value:m})}),(0,a.jsx)(eb.Z,{xs:13,children:(0,a.jsx)(e_.Z,{placeholder:c("Please_input_the_text"),value:h,onChange:e=>p(e.target.value),minRows:2,maxRows:4,endDecorator:(0,a.jsx)(eZ.ZP,{level:"body-xs",sx:{ml:"auto"},children:c("input_count")+h.length+c("input_unit")}),sx:{width:"100%",fontSize:14}})}),(0,a.jsx)(eb.Z,{xs:13,children:(0,a.jsx)(ek.Z,{type:"submit",variant:"outlined",sx:{width:"100%",height:"100%"},children:c("submit")})})]})})})]})]})},eD=l(36147),eE=l(96486),eR=l(19409),eO=l(87740),eI=l(80573),eM=(0,s.memo)(function(e){let{content:t}=e,{scene:l}=(0,s.useContext)(w.p),r="view"===t.role;return(0,a.jsx)("div",{className:j()("relative w-full p-2 md:p-4 rounded-xl break-words",{"bg-white dark:bg-[#232734]":r,"lg:w-full xl:w-full pl-0":["chat_with_db_execute","chat_dashboard"].includes(l)}),children:r?(0,a.jsx)(g.D,{components:en,rehypePlugins:[v.Z],children:t.context.replace(/]+)>/gi,"").replace(/]+)>/gi," ")}):(0,a.jsx)("div",{className:"",children:t.context})})}),eq=l(91085),eA=e=>{var t,l;let{messages:n,onSubmit:i}=e,{dbParam:d,currentDialogue:u,scene:m,model:x,refreshDialogList:h,chatId:p,agent:g,docId:v}=(0,s.useContext)(w.p),{t:f}=(0,et.$G)(),b=(0,o.useSearchParams)(),N=null!==(t=b&&b.get("select_param"))&&void 0!==t?t:"",_=null!==(l=b&&b.get("spaceNameOriginal"))&&void 0!==l?l:"",[Z,k]=(0,s.useState)(!1),[C,S]=(0,s.useState)(!1),[P,D]=(0,s.useState)(n),[E,I]=(0,s.useState)(""),[M,q]=(0,s.useState)(),A=(0,s.useRef)(null),L=(0,s.useMemo)(()=>"chat_dashboard"===m,[m]),F=(0,eI.Z)(),z=(0,s.useMemo)(()=>{switch(m){case"chat_agent":return g;case"chat_excel":return null==u?void 0:u.select_param;case"chat_flow":return N;default:return _||d}},[m,g,u,d,_,N]),T=async e=>{if(!Z&&e.trim()){if("chat_agent"===m&&!g){R.ZP.warning(f("choice_agent_tip"));return}try{k(!0),await i(e,{select_param:null!=z?z:""})}finally{k(!1)}}},G=e=>{try{return JSON.parse(e)}catch(t){return e}},[$,H]=R.ZP.useMessage(),U=async e=>{let t=null==e?void 
0:e.replace(/\trelations:.*/g,""),l=J()(t);l?t?$.open({type:"success",content:f("Copy_success")}):$.open({type:"warning",content:f("Copy_nothing")}):$.open({type:"error",content:f("Copry_error")})},B=async()=>{!Z&&v&&(k(!0),await F(v),k(!1))};return(0,r.Z)(async()=>{let e=(0,V.a_)();e&&e.id===p&&(await T(e.message),h(),localStorage.removeItem(V.rU))},[p]),(0,s.useEffect)(()=>{let e=n;L&&(e=(0,eE.cloneDeep)(n).map(e=>((null==e?void 0:e.role)==="view"&&"string"==typeof(null==e?void 0:e.context)&&(e.context=G(null==e?void 0:e.context)),e))),D(e.filter(e=>["view","human"].includes(e.role)))},[L,n]),(0,s.useEffect)(()=>{(0,eS.Vx)((0,eS.Lu)()).then(e=>{var t;q(null!==(t=e[1])&&void 0!==t?t:{})}).catch(e=>{console.log(e)})},[]),(0,s.useEffect)(()=>{setTimeout(()=>{var e;null===(e=A.current)||void 0===e||e.scrollTo(0,A.current.scrollHeight)},50)},[n]),(0,a.jsxs)(a.Fragment,{children:[H,(0,a.jsx)("div",{ref:A,className:"flex flex-1 overflow-y-auto pb-8 w-full flex-col",children:(0,a.jsx)("div",{className:"flex items-center flex-1 flex-col text-sm leading-6 text-slate-900 dark:text-slate-300 sm:text-base sm:leading-7",children:P.length?P.map((e,t)=>{var l;return"chat_agent"===m?(0,a.jsx)(eM,{content:e},t):(0,a.jsx)(ei,{content:e,isChartChat:L,onLinkClick:()=>{S(!0),I(JSON.stringify(null==e?void 0:e.context,null,2))},children:"view"===e.role&&(0,a.jsxs)("div",{className:"flex w-full border-t border-gray-200 dark:border-theme-dark",children:["chat_knowledge"===m&&e.retry?(0,a.jsxs)(ek.Z,{onClick:B,slots:{root:eg.ZP},slotProps:{root:{variant:"plain",color:"primary"}},children:[(0,a.jsx)(eO.Z,{}),"\xa0",(0,a.jsx)("span",{className:"text-sm",children:f("Retry")})]}):null,(0,a.jsxs)("div",{className:"flex w-full flex-row-reverse",children:[(0,a.jsx)(eP,{select_param:M,conv_index:Math.ceil((t+1)/2),question:null===(l=null==P?void 0:P.filter(t=>(null==t?void 0:t.role)==="human"&&(null==t?void 0:t.order)===e.order)[0])||void 0===l?void 
0:l.context,knowledge_space:_||d||""}),(0,a.jsx)(eC.Z,{title:f("Copy"),children:(0,a.jsx)(ek.Z,{onClick:()=>U(null==e?void 0:e.context),slots:{root:eg.ZP},slotProps:{root:{variant:"plain",color:"primary"}},sx:{borderRadius:40},children:(0,a.jsx)(O.Z,{})})})]})]})},t)}):(0,a.jsx)(eq.Z,{description:"Start a conversation"})})}),(0,a.jsx)("div",{className:j()("relative after:absolute after:-top-8 after:h-8 after:w-full after:bg-gradient-to-t after:from-theme-light after:to-transparent dark:after:from-theme-dark",{"cursor-not-allowed":"chat_excel"===m&&!(null==u?void 0:u.select_param)}),children:(0,a.jsxs)("div",{className:"flex flex-wrap w-full py-2 sm:pt-6 sm:pb-10 items-center",children:[x&&(0,a.jsx)("div",{className:"mr-2 flex",children:(0,y.A)(x)}),(0,a.jsx)(eR.Z,{loading:Z,onSubmit:T,handleFinish:k})]})}),(0,a.jsx)(eD.default,{title:"JSON Editor",open:C,width:"60%",cancelButtonProps:{hidden:!0},onOk:()=>{S(!1)},onCancel:()=>{S(!1)},children:(0,a.jsx)(c.Z,{className:"w-full h-[500px]",language:"json",value:E})})]})},eJ=l(67772),eL=l(45247),eF=()=>{var e;let t=(0,o.useSearchParams)(),{scene:l,chatId:c,model:i,agent:d,setModel:u,history:m,setHistory:x}=(0,s.useContext)(w.p),h=(0,n.Z)({}),p=null!==(e=t&&t.get("initMessage"))&&void 0!==e?e:"",[g,v]=(0,s.useState)(!1),[f,b]=(0,s.useState)(),y=async()=>{v(!0);let[,e]=await (0,eS.Vx)((0,eS.$i)(c));x(null!=e?e:[]),v(!1)},N=e=>{var t;let l=null===(t=e[e.length-1])||void 0===t?void 0:t.context;if(l)try{let e="string"==typeof l?JSON.parse(l):l;b((null==e?void 0:e.template_name)==="report"?null==e?void 0:e.charts:void 0)}catch(e){b(void 0)}};(0,r.Z)(async()=>{let e=(0,V.a_)();e&&e.id===c||await y()},[p,c]),(0,s.useEffect)(()=>{var e,t;if(!m.length)return;let l=null===(e=null===(t=m.filter(e=>"view"===e.role))||void 0===t?void 0:t.slice(-1))||void 0===e?void 0:e[0];(null==l?void 0:l.model_name)&&u(l.model_name),N(m)},[m.length]),(0,s.useEffect)(()=>()=>{x([])},[]);let _=(0,s.useCallback)((e,t)=>new Promise(a=>{let 
s=[...m,{role:"human",context:e,model_name:i,order:0,time_stamp:0},{role:"view",context:"",model_name:i,order:0,time_stamp:0}],r=s.length-1;x([...s]),h({data:{...t,chat_mode:l||"chat_normal",model_name:i,user_input:e},chatId:c,onMessage:e=>{(null==t?void 0:t.incremental)?s[r].context+=e:s[r].context=e,x([...s])},onDone:()=>{N(s),a()},onClose:()=>{N(s),a()},onError:e=>{s[r].context=e,x([...s]),a()}})}),[m,h,c,i,d,l]);return(0,a.jsxs)(a.Fragment,{children:[(0,a.jsx)(eL.Z,{visible:g}),(0,a.jsx)(eJ.Z,{refreshHistory:y,modelChange:e=>{u(e)}}),(0,a.jsxs)("div",{className:"px-4 flex flex-1 flex-wrap overflow-hidden relative",children:[!!(null==f?void 0:f.length)&&(0,a.jsx)("div",{className:"w-full pb-4 xl:w-3/4 h-1/2 xl:pr-4 xl:h-full overflow-y-auto",children:(0,a.jsx)(D.ZP,{chartsData:f})}),!(null==f?void 0:f.length)&&"chat_dashboard"===l&&(0,a.jsx)(eq.Z,{className:"w-full xl:w-3/4 h-1/2 xl:h-full"}),(0,a.jsx)("div",{className:j()("flex flex-1 flex-col overflow-hidden",{"px-0 xl:pl-4 h-1/2 w-full xl:w-auto xl:h-full border-t xl:border-t-0 xl:border-l dark:border-gray-800":"chat_dashboard"===l,"h-full lg:px-8":"chat_dashboard"!==l}),children:(0,a.jsx)(eA,{messages:m,onSubmit:_})})]})]})}},19409:function(e,t,l){l.d(t,{Z:function(){return R}});var a=l(85893),s=l(27496),r=l(79531),n=l(71577),o=l(67294),c=l(2487),i=l(83062),d=l(2453),u=l(46735),m=l(55241),x=l(39479),h=l(51009),p=l(58299),g=l(56155),v=l(30119),f=l(67421);let j=e=>{let{data:t,loading:l,submit:s,close:r}=e,{t:n}=(0,f.$G)(),o=e=>()=>{s(e),r()};return(0,a.jsx)("div",{style:{maxHeight:400,overflow:"auto"},children:(0,a.jsx)(c.Z,{dataSource:null==t?void 0:t.data,loading:l,rowKey:e=>e.prompt_name,renderItem:e=>(0,a.jsx)(c.Z.Item,{onClick:o(e.content),children:(0,a.jsx)(i.Z,{title:e.content,children:(0,a.jsx)(c.Z.Item.Meta,{style:{cursor:"copy"},title:e.prompt_name,description:n("Prompt_Info_Scene")+":".concat(e.chat_scene,",")+n("Prompt_Info_Sub_Scene")+":".concat(e.sub_chat_scene)})})},e.prompt_name)})})};var 
b=e=>{let{submit:t}=e,{t:l}=(0,f.$G)(),[s,r]=(0,o.useState)(!1),[n,c]=(0,o.useState)("common"),{data:b,loading:y}=(0,g.Z)(()=>(0,v.PR)("/prompt/list",{prompt_type:n}),{refreshDeps:[n],onError:e=>{d.ZP.error(null==e?void 0:e.message)}});return(0,a.jsx)(u.ZP,{theme:{components:{Popover:{minWidth:250}}},children:(0,a.jsx)(m.Z,{title:(0,a.jsx)(x.Z.Item,{label:"Prompt "+l("Type"),children:(0,a.jsx)(h.default,{style:{width:150},value:n,onChange:e=>{c(e)},options:[{label:l("Public")+" Prompts",value:"common"},{label:l("Private")+" Prompts",value:"private"}]})}),content:(0,a.jsx)(j,{data:b,loading:y,submit:t,close:()=>{r(!1)}}),placement:"topRight",trigger:"click",open:s,onOpenChange:e=>{r(e)},children:(0,a.jsx)(i.Z,{title:l("Click_Select")+" Prompt",children:(0,a.jsx)(p.Z,{className:"bottom-[30%]"})})})})},y=l(41468),w=l(89182),N=l(80573),_=l(5392),Z=l(84553);function k(e){let{dbParam:t,setDocId:l}=(0,o.useContext)(y.p),{onUploadFinish:s,handleFinish:r}=e,c=(0,N.Z)(),[i,d]=(0,o.useState)(!1),u=async e=>{d(!0);let a=new FormData;a.append("doc_name",e.file.name),a.append("doc_file",e.file),a.append("doc_type","DOCUMENT");let n=await (0,w.Vx)((0,w.iG)(t||"default",a));if(!n[1]){d(!1);return}l(n[1]),s(),d(!1),null==r||r(!0),await c(n[1]),null==r||r(!1)};return(0,a.jsx)(Z.default,{customRequest:u,showUploadList:!1,maxCount:1,multiple:!1,className:"absolute z-10 top-2 left-2",accept:".pdf,.ppt,.pptx,.xls,.xlsx,.doc,.docx,.txt,.md",children:(0,a.jsx)(n.ZP,{loading:i,size:"small",shape:"circle",icon:(0,a.jsx)(_.Z,{})})})}var C=l(11163),S=l(82353),P=l(1051);function D(e){let{document:t}=e;switch(t.status){case"RUNNING":return(0,a.jsx)(S.Rp,{});case"FINISHED":default:return(0,a.jsx)(S.s2,{});case"FAILED":return(0,a.jsx)(P.Z,{})}}function E(e){let{documents:t,dbParam:l}=e,s=(0,C.useRouter)(),r=e=>{s.push("/knowledge/chunk/?spaceName=".concat(l,"&id=").concat(e))};return(null==t?void 0:t.length)?(0,a.jsx)("div",{className:"absolute flex overflow-scroll h-12 top-[-35px] w-full 
z-10",children:t.map(e=>{let t;switch(e.status){case"RUNNING":t="#2db7f5";break;case"FINISHED":default:t="#87d068";break;case"FAILED":t="#f50"}return(0,a.jsx)(i.Z,{title:e.result,children:(0,a.jsxs)(n.ZP,{style:{color:t},onClick:()=>{r(e.id)},className:"shrink flex items-center mr-3",children:[(0,a.jsx)(D,{document:e}),e.doc_name]})},e.id)})}):null}var R=function(e){let{children:t,loading:l,onSubmit:c,handleFinish:i,...d}=e,{dbParam:u,scene:m}=(0,o.useContext)(y.p),[x,h]=(0,o.useState)(""),p=(0,o.useMemo)(()=>"chat_knowledge"===m,[m]),[g,v]=(0,o.useState)([]),f=(0,o.useRef)(0);async function j(){if(!u)return null;let[e,t]=await (0,w.Vx)((0,w._Q)(u,{page:1,page_size:f.current}));v(null==t?void 0:t.data)}(0,o.useEffect)(()=>{p&&j()},[u]);let N=async()=>{f.current+=1,await j()};return(0,a.jsxs)("div",{className:"flex-1 relative",children:[(0,a.jsx)(E,{documents:g,dbParam:u}),p&&(0,a.jsx)(k,{handleFinish:i,onUploadFinish:N,className:"absolute z-10 top-2 left-2"}),(0,a.jsx)(r.default.TextArea,{className:"flex-1 ".concat(p?"pl-10":""," pr-10"),size:"large",value:x,autoSize:{minRows:1,maxRows:4},...d,onPressEnter:e=>{if(x.trim()&&13===e.keyCode){if(e.shiftKey){e.preventDefault(),h(e=>e+"\n");return}c(x),setTimeout(()=>{h("")},0)}},onChange:e=>{if("number"==typeof d.maxLength){h(e.target.value.substring(0,d.maxLength));return}h(e.target.value)}}),(0,a.jsx)(n.ZP,{className:"ml-2 flex items-center justify-center absolute right-0 bottom-0",size:"large",type:"text",loading:l,icon:(0,a.jsx)(s.Z,{}),onClick:()=>{c(x)}}),(0,a.jsx)(b,{submit:e=>{h(x+e)}}),t]})}},45247:function(e,t,l){var a=l(85893),s=l(50888);t.Z=function(e){let{visible:t}=e;return t?(0,a.jsx)("div",{className:"absolute w-full h-full top-0 left-0 flex justify-center items-center z-10 bg-white dark:bg-black bg-opacity-50 dark:bg-opacity-50 backdrop-blur-sm text-3xl animate-fade animate-duration-200",children:(0,a.jsx)(s.Z,{})}):null}},43446:function(e,t,l){var 
a=l(1375),s=l(2453),r=l(67294),n=l(36353),o=l(41468),c=l(83454);t.Z=e=>{let{queryAgentURL:t="/api/v1/chat/completions"}=e,l=(0,r.useMemo)(()=>new AbortController,[]),{scene:i}=(0,r.useContext)(o.p),d=(0,r.useCallback)(async e=>{let{data:r,chatId:o,onMessage:d,onClose:u,onDone:m,onError:x}=e;if(!(null==r?void 0:r.user_input)&&!(null==r?void 0:r.doc_id)){s.ZP.warning(n.Z.t("no_context_tip"));return}let h={...r,conv_uid:o};if(!h.conv_uid){s.ZP.error("conv_uid 不存在,请刷新后重试");return}try{var p;await (0,a.L)("".concat(null!==(p=c.env.API_BASE_URL)&&void 0!==p?p:"").concat(t),{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(h),signal:l.signal,openWhenHidden:!0,async onopen(e){if(e.ok&&e.headers.get("content-type")===a.a)return},onclose(){l.abort(),null==u||u()},onerror(e){throw Error(e)},onmessage:e=>{let t=e.data;try{t="chat_agent"===i?JSON.parse(t).vis:JSON.parse(t)}catch(e){t.replaceAll("\\n","\n")}"string"==typeof t?"[DONE]"===t?null==m||m():(null==t?void 0:t.startsWith("[ERROR]"))?null==x||x(null==t?void 0:t.replace("[ERROR]","")):null==d||d(t):(null==d||d(t),null==m||m())}})}catch(e){l.abort(),null==x||x("Sorry, We meet some error, please try agin later.",e)}},[t]);return(0,r.useEffect)(()=>()=>{l.abort()},[]),d}},80573:function(e,t,l){var a=l(41468),s=l(67294),r=l(43446),n=l(89182);t.Z=()=>{let{history:e,setHistory:t,chatId:l,model:o,docId:c}=(0,s.useContext)(a.p),i=(0,r.Z)({queryAgentURL:"/knowledge/document/summary"}),d=(0,s.useCallback)(async e=>{let[,a]=await (0,n.Vx)((0,n.$i)(l)),s=[...a,{role:"human",context:"",model_name:o,order:0,time_stamp:0},{role:"view",context:"",model_name:o,order:0,time_stamp:0,retry:!0}],r=s.length-1;t([...s]),await i({data:{doc_id:e||c,model_name:o},chatId:l,onMessage:e=>{s[r].context=e,t([...s])}})},[e,o,c,l]);return d}}}]);
\ No newline at end of file
diff --git a/dbgpt/app/static/_next/static/chunks/pages/_app-7876cf4a861cac03.js b/dbgpt/app/static/_next/static/chunks/pages/_app-ccb54464fb48754c.js
similarity index 100%
rename from dbgpt/app/static/_next/static/chunks/pages/_app-7876cf4a861cac03.js
rename to dbgpt/app/static/_next/static/chunks/pages/_app-ccb54464fb48754c.js
diff --git a/dbgpt/app/static/_next/static/chunks/pages/index-0b2d61c1c6358f20.js b/dbgpt/app/static/_next/static/chunks/pages/index-0b2d61c1c6358f20.js
new file mode 100644
index 000000000..c72cc5925
--- /dev/null
+++ b/dbgpt/app/static/_next/static/chunks/pages/index-0b2d61c1c6358f20.js
@@ -0,0 +1 @@
+(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[5405],{48312:function(e,t,n){(window.__NEXT_P=window.__NEXT_P||[]).push(["/",function(){return n(57464)}])},81799:function(e,t,n){"use strict";n.d(t,{A:function(){return d}});var a=n(85893),s=n(41468),l=n(51009),r=n(19284),c=n(25675),o=n.n(c),i=n(67294),u=n(67421);function d(e,t){var n;let{width:s,height:l}=t||{};return e?(0,a.jsx)(o(),{className:"rounded-full border border-gray-200 object-contain bg-white inline-block",width:s||24,height:l||24,src:(null===(n=r.H[e])||void 0===n?void 0:n.icon)||"/models/huggingface.svg",alt:"llm"}):null}t.Z=function(e){let{onChange:t}=e,{t:n}=(0,u.$G)(),{modelList:c,model:o}=(0,i.useContext)(s.p);return!c||c.length<=0?null:(0,a.jsx)(l.default,{value:o,placeholder:n("choose_model"),className:"w-52",onChange:e=>{null==t||t(e)},children:c.map(e=>{var t;return(0,a.jsx)(l.default.Option,{children:(0,a.jsxs)("div",{className:"flex items-center",children:[d(e),(0,a.jsx)("span",{className:"ml-2",children:(null===(t=r.H[e])||void 0===t?void 0:t.label)||e})]})},e)})})}},19409:function(e,t,n){"use strict";n.d(t,{Z:function(){return I}});var a=n(85893),s=n(27496),l=n(79531),r=n(71577),c=n(67294),o=n(2487),i=n(83062),u=n(2453),d=n(46735),m=n(55241),h=n(39479),p=n(51009),x=n(58299),f=n(56155),_=n(30119),v=n(67421);let j=e=>{let{data:t,loading:n,submit:s,close:l}=e,{t:r}=(0,v.$G)(),c=e=>()=>{s(e),l()};return(0,a.jsx)("div",{style:{maxHeight:400,overflow:"auto"},children:(0,a.jsx)(o.Z,{dataSource:null==t?void 0:t.data,loading:n,rowKey:e=>e.prompt_name,renderItem:e=>(0,a.jsx)(o.Z.Item,{onClick:c(e.content),children:(0,a.jsx)(i.Z,{title:e.content,children:(0,a.jsx)(o.Z.Item.Meta,{style:{cursor:"copy"},title:e.prompt_name,description:r("Prompt_Info_Scene")+":".concat(e.chat_scene,",")+r("Prompt_Info_Sub_Scene")+":".concat(e.sub_chat_scene)})})},e.prompt_name)})})};var 
w=e=>{let{submit:t}=e,{t:n}=(0,v.$G)(),[s,l]=(0,c.useState)(!1),[r,o]=(0,c.useState)("common"),{data:w,loading:g}=(0,f.Z)(()=>(0,_.PR)("/prompt/list",{prompt_type:r}),{refreshDeps:[r],onError:e=>{u.ZP.error(null==e?void 0:e.message)}});return(0,a.jsx)(d.ZP,{theme:{components:{Popover:{minWidth:250}}},children:(0,a.jsx)(m.Z,{title:(0,a.jsx)(h.Z.Item,{label:"Prompt "+n("Type"),children:(0,a.jsx)(p.default,{style:{width:150},value:r,onChange:e=>{o(e)},options:[{label:n("Public")+" Prompts",value:"common"},{label:n("Private")+" Prompts",value:"private"}]})}),content:(0,a.jsx)(j,{data:w,loading:g,submit:t,close:()=>{l(!1)}}),placement:"topRight",trigger:"click",open:s,onOpenChange:e=>{l(e)},children:(0,a.jsx)(i.Z,{title:n("Click_Select")+" Prompt",children:(0,a.jsx)(x.Z,{className:"bottom-[30%]"})})})})},g=n(41468),b=n(89182),N=n(80573),y=n(5392),Z=n(84553);function P(e){let{dbParam:t,setDocId:n}=(0,c.useContext)(g.p),{onUploadFinish:s,handleFinish:l}=e,o=(0,N.Z)(),[i,u]=(0,c.useState)(!1),d=async e=>{u(!0);let a=new FormData;a.append("doc_name",e.file.name),a.append("doc_file",e.file),a.append("doc_type","DOCUMENT");let r=await (0,b.Vx)((0,b.iG)(t||"default",a));if(!r[1]){u(!1);return}n(r[1]),s(),u(!1),null==l||l(!0),await o(r[1]),null==l||l(!1)};return(0,a.jsx)(Z.default,{customRequest:d,showUploadList:!1,maxCount:1,multiple:!1,className:"absolute z-10 top-2 left-2",accept:".pdf,.ppt,.pptx,.xls,.xlsx,.doc,.docx,.txt,.md",children:(0,a.jsx)(r.ZP,{loading:i,size:"small",shape:"circle",icon:(0,a.jsx)(y.Z,{})})})}var k=n(11163),C=n(82353),S=n(1051);function R(e){let{document:t}=e;switch(t.status){case"RUNNING":return(0,a.jsx)(C.Rp,{});case"FINISHED":default:return(0,a.jsx)(C.s2,{});case"FAILED":return(0,a.jsx)(S.Z,{})}}function E(e){let{documents:t,dbParam:n}=e,s=(0,k.useRouter)(),l=e=>{s.push("/knowledge/chunk/?spaceName=".concat(n,"&id=").concat(e))};return(null==t?void 0:t.length)?(0,a.jsx)("div",{className:"absolute flex overflow-scroll h-12 top-[-35px] w-full 
z-10",children:t.map(e=>{let t;switch(e.status){case"RUNNING":t="#2db7f5";break;case"FINISHED":default:t="#87d068";break;case"FAILED":t="#f50"}return(0,a.jsx)(i.Z,{title:e.result,children:(0,a.jsxs)(r.ZP,{style:{color:t},onClick:()=>{l(e.id)},className:"shrink flex items-center mr-3",children:[(0,a.jsx)(R,{document:e}),e.doc_name]})},e.id)})}):null}var I=function(e){let{children:t,loading:n,onSubmit:o,handleFinish:i,...u}=e,{dbParam:d,scene:m}=(0,c.useContext)(g.p),[h,p]=(0,c.useState)(""),x=(0,c.useMemo)(()=>"chat_knowledge"===m,[m]),[f,_]=(0,c.useState)([]),v=(0,c.useRef)(0);async function j(){if(!d)return null;let[e,t]=await (0,b.Vx)((0,b._Q)(d,{page:1,page_size:v.current}));_(null==t?void 0:t.data)}(0,c.useEffect)(()=>{x&&j()},[d]);let N=async()=>{v.current+=1,await j()};return(0,a.jsxs)("div",{className:"flex-1 relative",children:[(0,a.jsx)(E,{documents:f,dbParam:d}),x&&(0,a.jsx)(P,{handleFinish:i,onUploadFinish:N,className:"absolute z-10 top-2 left-2"}),(0,a.jsx)(l.default.TextArea,{className:"flex-1 ".concat(x?"pl-10":""," pr-10"),size:"large",value:h,autoSize:{minRows:1,maxRows:4},...u,onPressEnter:e=>{if(h.trim()&&13===e.keyCode){if(e.shiftKey){e.preventDefault(),p(e=>e+"\n");return}o(h),setTimeout(()=>{p("")},0)}},onChange:e=>{if("number"==typeof u.maxLength){p(e.target.value.substring(0,u.maxLength));return}p(e.target.value)}}),(0,a.jsx)(r.ZP,{className:"ml-2 flex items-center justify-center absolute right-0 bottom-0",size:"large",type:"text",loading:n,icon:(0,a.jsx)(s.Z,{}),onClick:()=>{o(h)}}),(0,a.jsx)(w,{submit:e=>{p(h+e)}}),t]})}},43446:function(e,t,n){"use strict";var a=n(1375),s=n(2453),l=n(67294),r=n(36353),c=n(41468),o=n(83454);t.Z=e=>{let{queryAgentURL:t="/api/v1/chat/completions"}=e,n=(0,l.useMemo)(()=>new AbortController,[]),{scene:i}=(0,l.useContext)(c.p),u=(0,l.useCallback)(async e=>{let{data:l,chatId:c,onMessage:u,onClose:d,onDone:m,onError:h}=e;if(!(null==l?void 0:l.user_input)&&!(null==l?void 
0:l.doc_id)){s.ZP.warning(r.Z.t("no_context_tip"));return}let p={...l,conv_uid:c};if(!p.conv_uid){s.ZP.error("conv_uid 不存在,请刷新后重试");return}try{var x;await (0,a.L)("".concat(null!==(x=o.env.API_BASE_URL)&&void 0!==x?x:"").concat(t),{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(p),signal:n.signal,openWhenHidden:!0,async onopen(e){if(e.ok&&e.headers.get("content-type")===a.a)return},onclose(){n.abort(),null==d||d()},onerror(e){throw Error(e)},onmessage:e=>{let t=e.data;try{t="chat_agent"===i?JSON.parse(t).vis:JSON.parse(t)}catch(e){t.replaceAll("\\n","\n")}"string"==typeof t?"[DONE]"===t?null==m||m():(null==t?void 0:t.startsWith("[ERROR]"))?null==h||h(null==t?void 0:t.replace("[ERROR]","")):null==u||u(t):(null==u||u(t),null==m||m())}})}catch(e){n.abort(),null==h||h("Sorry, We meet some error, please try agin later.",e)}},[t]);return(0,l.useEffect)(()=>()=>{n.abort()},[]),u}},80573:function(e,t,n){"use strict";var a=n(41468),s=n(67294),l=n(43446),r=n(89182);t.Z=()=>{let{history:e,setHistory:t,chatId:n,model:c,docId:o}=(0,s.useContext)(a.p),i=(0,l.Z)({queryAgentURL:"/knowledge/document/summary"}),u=(0,s.useCallback)(async e=>{let[,a]=await (0,r.Vx)((0,r.$i)(n)),s=[...a,{role:"human",context:"",model_name:c,order:0,time_stamp:0},{role:"view",context:"",model_name:c,order:0,time_stamp:0,retry:!0}],l=s.length-1;t([...s]),await i({data:{doc_id:e||o,model_name:c},chatId:n,onMessage:e=>{s[l].context=e,t([...s])}})},[e,c,o,n]);return u}},57464:function(e,t,n){"use strict";n.r(t);var a=n(85893),s=n(56155),l=n(67294),r=n(96074),c=n(75081),o=n(66309),i=n(39332),u=n(25675),d=n.n(u),m=n(89182),h=n(81799),p=n(41468),x=n(19409),f=n(67421),_=n(62418),v=n(53014),j=n(82353),w=n(94184),g=n.n(w);t.default=()=>{let e=(0,i.useRouter)(),{model:t,setModel:n}=(0,l.useContext)(p.p),{t:u}=(0,f.$G)(),[w,b]=(0,l.useState)(!1),[N,y]=(0,l.useState)(!1),{data:Z=[]}=(0,s.Z)(async()=>{y(!0);let[,e]=await (0,m.Vx)((0,m.CU)());return y(!1),null!=e?e:[]}),P=async 
n=>{b(!0);let[,a]=await (0,m.Vx)((0,m.sW)({chat_mode:"chat_normal"}));a&&(localStorage.setItem(_.rU,JSON.stringify({id:a.conv_uid,message:n})),e.push("/chat/?scene=chat_normal&id=".concat(a.conv_uid).concat(t?"&model=".concat(t):""))),b(!1)},k=async n=>{if(n.show_disable)return;let[,a]=await (0,m.Vx)((0,m.sW)({chat_mode:"chat_normal"}));a&&e.push("/chat?scene=".concat(n.chat_scene,"&id=").concat(a.conv_uid).concat(t?"&model=".concat(t):""))};return(0,a.jsx)("div",{className:"px-4 h-screen flex flex-col justify-center items-center overflow-hidden",children:(0,a.jsxs)("div",{className:"max-w-3xl max-h-screen overflow-y-auto",children:[(0,a.jsx)(d(),{src:"/LOGO.png",alt:"Revolutionizing Database Interactions with Private LLM Technology",width:856,height:160,className:"w-full mt-4",unoptimized:!0}),(0,a.jsx)(r.Z,{className:"!text-[#878c93] !my-6",plain:!0,children:u("Quick_Start")}),(0,a.jsx)(c.Z,{spinning:N,children:(0,a.jsx)("div",{className:"flex flex-wrap -m-1 md:-m-2",children:Z.map(e=>(0,a.jsx)("div",{className:"w-full sm:w-1/2 p-1 md:p-2",onClick:()=>{k(e)},children:(0,a.jsxs)("div",{className:g()("flex flex-row justify-center h-[102px] min-h-min bg-white dark:bg-[#232734] dark:text-white rounded p-4 cursor-pointer hover:-translate-y-1 transition-[transform_shadow] duration-300 hover:shadow-[0_14px_20px_-10px_rgba(100,100,100,.1)]",{"grayscale !cursor-no-drop":e.show_disable}),children:[function(e){switch(e){case"chat_knowledge":return(0,a.jsx)(v.Z,{className:"w-10 h-10 mr-4 p-1",component:j.je});case"chat_with_db_execute":return(0,a.jsx)(v.Z,{className:"w-10 h-10 mr-4 p-1",component:j.zM});case"chat_excel":return(0,a.jsx)(v.Z,{className:"w-10 h-10 mr-4 p-1",component:j.DL});case"chat_with_db_qa":return(0,a.jsx)(v.Z,{className:"w-10 h-10 mr-4 p-1",component:j.RD});case"chat_dashboard":return(0,a.jsx)(v.Z,{className:"w-10 h-10 mr-4 p-1",component:j.In});case"chat_agent":return(0,a.jsx)(v.Z,{className:"w-10 h-10 mr-4 
p-1",component:j.si});case"dbgpt_chat":return(0,a.jsx)(v.Z,{className:"w-10 h-10 mr-4 p-1",component:j.O7});default:return null}}(e.chat_scene),(0,a.jsxs)("div",{className:"flex flex-col flex-1",children:[(0,a.jsxs)("h2",{className:"flex items-center text-lg font-sans font-semibold",children:[e.scene_name,e.show_disable&&(0,a.jsx)(o.Z,{className:"ml-2",children:"Comming soon"})]}),(0,a.jsx)("p",{className:"opacity-80 line-clamp-2",children:e.scene_describe})]})]})},e.chat_scene))})}),(0,a.jsx)("div",{className:"mt-8 mb-2",children:(0,a.jsx)(h.Z,{onChange:e=>{n(e)}})}),(0,a.jsx)("div",{className:"flex flex-1 w-full mb-4",children:(0,a.jsx)(x.Z,{loading:w,onSubmit:P})})]})})}},30119:function(e,t,n){"use strict";n.d(t,{Tk:function(){return o},PR:function(){return i}});var a=n(2453),s=n(6154),l=n(83454);let r=s.default.create({baseURL:l.env.API_BASE_URL});r.defaults.timeout=1e4,r.interceptors.response.use(e=>e.data,e=>Promise.reject(e)),n(96486);let c={"content-type":"application/json"},o=(e,t)=>{if(t){let n=Object.keys(t).filter(e=>void 0!==t[e]&&""!==t[e]).map(e=>"".concat(e,"=").concat(t[e])).join("&");n&&(e+="?".concat(n))}return r.get("/api"+e,{headers:c}).then(e=>e).catch(e=>{a.ZP.error(e),Promise.reject(e)})},i=(e,t)=>r.post(e,t,{headers:c}).then(e=>e).catch(e=>{a.ZP.error(e),Promise.reject(e)})}},function(e){e.O(0,[3662,2185,5503,1009,9479,1647,4553,411,2487,9305,1353,9774,2888,179],function(){return e(e.s=48312)}),_N_E=e.O()}]);
\ No newline at end of file
diff --git a/dbgpt/app/static/_next/static/chunks/pages/index-9d77aed53ca78d15.js b/dbgpt/app/static/_next/static/chunks/pages/index-9d77aed53ca78d15.js
deleted file mode 100644
index 2e68bd74b..000000000
--- a/dbgpt/app/static/_next/static/chunks/pages/index-9d77aed53ca78d15.js
+++ /dev/null
@@ -1 +0,0 @@
-(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[5405],{48312:function(e,t,n){(window.__NEXT_P=window.__NEXT_P||[]).push(["/",function(){return n(57464)}])},81799:function(e,t,n){"use strict";n.d(t,{A:function(){return d}});var a=n(85893),s=n(41468),l=n(51009),r=n(19284),c=n(25675),o=n.n(c),i=n(67294),u=n(67421);function d(e,t){var n;let{width:s,height:l}=t||{};return e?(0,a.jsx)(o(),{className:"rounded-full border border-gray-200 object-contain bg-white inline-block",width:s||24,height:l||24,src:(null===(n=r.H[e])||void 0===n?void 0:n.icon)||"/models/huggingface.svg",alt:"llm"}):null}t.Z=function(e){let{onChange:t}=e,{t:n}=(0,u.$G)(),{modelList:c,model:o}=(0,i.useContext)(s.p);return!c||c.length<=0?null:(0,a.jsx)(l.default,{value:o,placeholder:n("choose_model"),className:"w-52",onChange:e=>{null==t||t(e)},children:c.map(e=>{var t;return(0,a.jsx)(l.default.Option,{children:(0,a.jsxs)("div",{className:"flex items-center",children:[d(e),(0,a.jsx)("span",{className:"ml-2",children:(null===(t=r.H[e])||void 0===t?void 0:t.label)||e})]})},e)})})}},19409:function(e,t,n){"use strict";n.d(t,{Z:function(){return I}});var a=n(85893),s=n(27496),l=n(79531),r=n(71577),c=n(67294),o=n(2487),i=n(83062),u=n(2453),d=n(46735),m=n(55241),h=n(39479),p=n(51009),x=n(58299),f=n(56155),_=n(30119),j=n(67421);let v=e=>{let{data:t,loading:n,submit:s,close:l}=e,{t:r}=(0,j.$G)(),c=e=>()=>{s(e),l()};return(0,a.jsx)("div",{style:{maxHeight:400,overflow:"auto"},children:(0,a.jsx)(o.Z,{dataSource:null==t?void 0:t.data,loading:n,rowKey:e=>e.prompt_name,renderItem:e=>(0,a.jsx)(o.Z.Item,{onClick:c(e.content),children:(0,a.jsx)(i.Z,{title:e.content,children:(0,a.jsx)(o.Z.Item.Meta,{style:{cursor:"copy"},title:e.prompt_name,description:r("Prompt_Info_Scene")+":".concat(e.chat_scene,",")+r("Prompt_Info_Sub_Scene")+":".concat(e.sub_chat_scene)})})},e.prompt_name)})})};var 
w=e=>{let{submit:t}=e,{t:n}=(0,j.$G)(),[s,l]=(0,c.useState)(!1),[r,o]=(0,c.useState)("common"),{data:w,loading:g}=(0,f.Z)(()=>(0,_.PR)("/prompt/list",{prompt_type:r}),{refreshDeps:[r],onError:e=>{u.ZP.error(null==e?void 0:e.message)}});return(0,a.jsx)(d.ZP,{theme:{components:{Popover:{minWidth:250}}},children:(0,a.jsx)(m.Z,{title:(0,a.jsx)(h.Z.Item,{label:"Prompt "+n("Type"),children:(0,a.jsx)(p.default,{style:{width:150},value:r,onChange:e=>{o(e)},options:[{label:n("Public")+" Prompts",value:"common"},{label:n("Private")+" Prompts",value:"private"}]})}),content:(0,a.jsx)(v,{data:w,loading:g,submit:t,close:()=>{l(!1)}}),placement:"topRight",trigger:"click",open:s,onOpenChange:e=>{l(e)},children:(0,a.jsx)(i.Z,{title:n("Click_Select")+" Prompt",children:(0,a.jsx)(x.Z,{className:"bottom-[30%]"})})})})},g=n(41468),b=n(89182),N=n(80573),y=n(5392),Z=n(84553);function P(e){let{dbParam:t,setDocId:n}=(0,c.useContext)(g.p),{onUploadFinish:s,handleFinish:l}=e,o=(0,N.Z)(),[i,u]=(0,c.useState)(!1),d=async e=>{u(!0);let a=new FormData;a.append("doc_name",e.file.name),a.append("doc_file",e.file),a.append("doc_type","DOCUMENT");let r=await (0,b.Vx)((0,b.iG)(t||"default",a));if(!r[1]){u(!1);return}n(r[1]),s(),u(!1),null==l||l(!0),await o(r[1]),null==l||l(!1)};return(0,a.jsx)(Z.default,{customRequest:d,showUploadList:!1,maxCount:1,multiple:!1,className:"absolute z-10 top-2 left-2",accept:".pdf,.ppt,.pptx,.xls,.xlsx,.doc,.docx,.txt,.md",children:(0,a.jsx)(r.ZP,{loading:i,size:"small",shape:"circle",icon:(0,a.jsx)(y.Z,{})})})}var k=n(11163),C=n(82353),S=n(1051);function R(e){let{document:t}=e;switch(t.status){case"RUNNING":return(0,a.jsx)(C.Rp,{});case"FINISHED":default:return(0,a.jsx)(C.s2,{});case"FAILED":return(0,a.jsx)(S.Z,{})}}function E(e){let{documents:t,dbParam:n}=e,s=(0,k.useRouter)(),l=e=>{s.push("/knowledge/chunk/?spaceName=".concat(n,"&id=").concat(e))};return(null==t?void 0:t.length)?(0,a.jsx)("div",{className:"absolute flex overflow-scroll h-12 top-[-35px] w-full 
z-10",children:t.map(e=>{let t;switch(e.status){case"RUNNING":t="#2db7f5";break;case"FINISHED":default:t="#87d068";break;case"FAILED":t="#f50"}return(0,a.jsx)(i.Z,{title:e.result,children:(0,a.jsxs)(r.ZP,{style:{color:t},onClick:()=>{l(e.id)},className:"shrink flex items-center mr-3",children:[(0,a.jsx)(R,{document:e}),e.doc_name]})},e.id)})}):null}var I=function(e){let{children:t,loading:n,onSubmit:o,handleFinish:i,...u}=e,{dbParam:d,scene:m}=(0,c.useContext)(g.p),[h,p]=(0,c.useState)(""),x=(0,c.useMemo)(()=>"chat_knowledge"===m,[m]),[f,_]=(0,c.useState)([]),j=(0,c.useRef)(0);async function v(){if(!d)return null;let[e,t]=await (0,b.Vx)((0,b._Q)(d,{page:1,page_size:j.current}));_(null==t?void 0:t.data)}(0,c.useEffect)(()=>{x&&v()},[d]);let N=async()=>{j.current+=1,await v()};return(0,a.jsxs)("div",{className:"flex-1 relative",children:[(0,a.jsx)(E,{documents:f,dbParam:d}),x&&(0,a.jsx)(P,{handleFinish:i,onUploadFinish:N,className:"absolute z-10 top-2 left-2"}),(0,a.jsx)(l.default.TextArea,{className:"flex-1 ".concat(x?"pl-10":""," pr-10"),size:"large",value:h,autoSize:{minRows:1,maxRows:4},...u,onPressEnter:e=>{if(h.trim()&&13===e.keyCode){if(e.shiftKey){p(e=>e+"\n");return}o(h),setTimeout(()=>{p("")},0)}},onChange:e=>{if("number"==typeof u.maxLength){p(e.target.value.substring(0,u.maxLength));return}p(e.target.value)}}),(0,a.jsx)(r.ZP,{className:"ml-2 flex items-center justify-center absolute right-0 bottom-0",size:"large",type:"text",loading:n,icon:(0,a.jsx)(s.Z,{}),onClick:()=>{o(h)}}),(0,a.jsx)(w,{submit:e=>{p(h+e)}}),t]})}},43446:function(e,t,n){"use strict";var a=n(1375),s=n(2453),l=n(67294),r=n(36353),c=n(41468),o=n(83454);t.Z=e=>{let{queryAgentURL:t="/api/v1/chat/completions"}=e,n=(0,l.useMemo)(()=>new AbortController,[]),{scene:i}=(0,l.useContext)(c.p),u=(0,l.useCallback)(async e=>{let{data:l,chatId:c,onMessage:u,onClose:d,onDone:m,onError:h}=e;if(!(null==l?void 0:l.user_input)&&!(null==l?void 0:l.doc_id)){s.ZP.warning(r.Z.t("no_context_tip"));return}let 
p={...l,conv_uid:c};if(!p.conv_uid){s.ZP.error("conv_uid 不存在,请刷新后重试");return}try{var x;await (0,a.L)("".concat(null!==(x=o.env.API_BASE_URL)&&void 0!==x?x:"").concat(t),{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(p),signal:n.signal,openWhenHidden:!0,async onopen(e){if(e.ok&&e.headers.get("content-type")===a.a)return},onclose(){n.abort(),null==d||d()},onerror(e){throw Error(e)},onmessage:e=>{let t=e.data;try{t="chat_agent"===i?JSON.parse(t).vis:JSON.parse(t)}catch(e){t.replaceAll("\\n","\n")}"string"==typeof t?"[DONE]"===t?null==m||m():(null==t?void 0:t.startsWith("[ERROR]"))?null==h||h(null==t?void 0:t.replace("[ERROR]","")):null==u||u(t):(null==u||u(t),null==m||m())}})}catch(e){n.abort(),null==h||h("Sorry, We meet some error, please try agin later.",e)}},[t]);return(0,l.useEffect)(()=>()=>{n.abort()},[]),u}},80573:function(e,t,n){"use strict";var a=n(41468),s=n(67294),l=n(43446),r=n(89182);t.Z=()=>{let{history:e,setHistory:t,chatId:n,model:c,docId:o}=(0,s.useContext)(a.p),i=(0,l.Z)({queryAgentURL:"/knowledge/document/summary"}),u=(0,s.useCallback)(async e=>{let[,a]=await (0,r.Vx)((0,r.$i)(n)),s=[...a,{role:"human",context:"",model_name:c,order:0,time_stamp:0},{role:"view",context:"",model_name:c,order:0,time_stamp:0,retry:!0}],l=s.length-1;t([...s]),await i({data:{doc_id:e||o,model_name:c},chatId:n,onMessage:e=>{s[l].context=e,t([...s])}})},[e,c,o,n]);return u}},57464:function(e,t,n){"use strict";n.r(t);var a=n(85893),s=n(56155),l=n(67294),r=n(96074),c=n(75081),o=n(66309),i=n(39332),u=n(25675),d=n.n(u),m=n(89182),h=n(81799),p=n(41468),x=n(19409),f=n(67421),_=n(62418),j=n(53014),v=n(82353),w=n(94184),g=n.n(w);t.default=()=>{let e=(0,i.useRouter)(),{model:t,setModel:n}=(0,l.useContext)(p.p),{t:u}=(0,f.$G)(),[w,b]=(0,l.useState)(!1),[N,y]=(0,l.useState)(!1),{data:Z=[]}=(0,s.Z)(async()=>{y(!0);let[,e]=await (0,m.Vx)((0,m.CU)());return y(!1),null!=e?e:[]}),P=async n=>{b(!0);let[,a]=await 
(0,m.Vx)((0,m.sW)({chat_mode:"chat_normal"}));a&&(localStorage.setItem(_.rU,JSON.stringify({id:a.conv_uid,message:n})),e.push("/chat/?scene=chat_normal&id=".concat(a.conv_uid).concat(t?"&model=".concat(t):""))),b(!1)},k=async n=>{if(n.show_disable)return;let[,a]=await (0,m.Vx)((0,m.sW)({chat_mode:"chat_normal"}));a&&e.push("/chat?scene=".concat(n.chat_scene,"&id=").concat(a.conv_uid).concat(t?"&model=".concat(t):""))};return(0,a.jsx)("div",{className:"px-4 h-screen flex flex-col justify-center items-center overflow-hidden",children:(0,a.jsxs)("div",{className:"max-w-3xl max-h-screen overflow-y-auto",children:[(0,a.jsx)(d(),{src:"/LOGO.png",alt:"Revolutionizing Database Interactions with Private LLM Technology",width:856,height:160,className:"w-full mt-4",unoptimized:!0}),(0,a.jsx)(r.Z,{className:"!text-[#878c93] !my-6",plain:!0,children:u("Quick_Start")}),(0,a.jsx)(c.Z,{spinning:N,children:(0,a.jsx)("div",{className:"flex flex-wrap -m-1 md:-m-2",children:Z.map(e=>(0,a.jsx)("div",{className:"w-full sm:w-1/2 p-1 md:p-2",onClick:()=>{k(e)},children:(0,a.jsxs)("div",{className:g()("flex flex-row justify-center h-[102px] min-h-min bg-white dark:bg-[#232734] dark:text-white rounded p-4 cursor-pointer hover:-translate-y-1 transition-[transform_shadow] duration-300 hover:shadow-[0_14px_20px_-10px_rgba(100,100,100,.1)]",{"grayscale !cursor-no-drop":e.show_disable}),children:[function(e){switch(e){case"chat_knowledge":return(0,a.jsx)(j.Z,{className:"w-10 h-10 mr-4 p-1",component:v.je});case"chat_with_db_execute":return(0,a.jsx)(j.Z,{className:"w-10 h-10 mr-4 p-1",component:v.zM});case"chat_excel":return(0,a.jsx)(j.Z,{className:"w-10 h-10 mr-4 p-1",component:v.DL});case"chat_with_db_qa":return(0,a.jsx)(j.Z,{className:"w-10 h-10 mr-4 p-1",component:v.RD});case"chat_dashboard":return(0,a.jsx)(j.Z,{className:"w-10 h-10 mr-4 p-1",component:v.In});case"chat_agent":return(0,a.jsx)(j.Z,{className:"w-10 h-10 mr-4 
p-1",component:v.si});case"dbgpt_chat":return(0,a.jsx)(j.Z,{className:"w-10 h-10 mr-4 p-1",component:v.O7});default:return null}}(e.chat_scene),(0,a.jsxs)("div",{className:"flex flex-col flex-1",children:[(0,a.jsxs)("h2",{className:"flex items-center text-lg font-sans font-semibold",children:[e.scene_name,e.show_disable&&(0,a.jsx)(o.Z,{className:"ml-2",children:"Comming soon"})]}),(0,a.jsx)("p",{className:"opacity-80 line-clamp-2",children:e.scene_describe})]})]})},e.chat_scene))})}),(0,a.jsx)("div",{className:"mt-8 mb-2",children:(0,a.jsx)(h.Z,{onChange:e=>{n(e)}})}),(0,a.jsx)("div",{className:"flex flex-1 w-full mb-4",children:(0,a.jsx)(x.Z,{loading:w,onSubmit:P})})]})})}},30119:function(e,t,n){"use strict";n.d(t,{Tk:function(){return o},PR:function(){return i}});var a=n(2453),s=n(6154),l=n(83454);let r=s.default.create({baseURL:l.env.API_BASE_URL});r.defaults.timeout=1e4,r.interceptors.response.use(e=>e.data,e=>Promise.reject(e)),n(96486);let c={"content-type":"application/json"},o=(e,t)=>{if(t){let n=Object.keys(t).filter(e=>void 0!==t[e]&&""!==t[e]).map(e=>"".concat(e,"=").concat(t[e])).join("&");n&&(e+="?".concat(n))}return r.get("/api"+e,{headers:c}).then(e=>e).catch(e=>{a.ZP.error(e),Promise.reject(e)})},i=(e,t)=>r.post(e,t,{headers:c}).then(e=>e).catch(e=>{a.ZP.error(e),Promise.reject(e)})}},function(e){e.O(0,[3662,2185,5503,1009,9479,1647,4553,411,2487,9305,1353,9774,2888,179],function(){return e(e.s=48312)}),_N_E=e.O()}]);
\ No newline at end of file
diff --git a/dbgpt/app/static/_next/static/chunks/pages/knowledge-223d50e9531bd961.js b/dbgpt/app/static/_next/static/chunks/pages/knowledge-223d50e9531bd961.js
new file mode 100644
index 000000000..8a24e3280
--- /dev/null
+++ b/dbgpt/app/static/_next/static/chunks/pages/knowledge-223d50e9531bd961.js
@@ -0,0 +1 @@
+(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[8662],{54681:function(e,t,a){(window.__NEXT_P=window.__NEXT_P||[]).push(["/knowledge",function(){return a(18671)}])},26892:function(e,t,a){"use strict";var l=a(85893),s=a(67294),n=a(66309),r=a(83062),i=a(94184),c=a.n(i),o=a(25675),d=a.n(o);t.Z=(0,s.memo)(function(e){let{icon:t,iconBorder:a=!0,title:i,desc:o,tags:m,children:u,disabled:x,operations:h,className:p,..._}=e,j=(0,s.useMemo)(()=>t?"string"==typeof t?(0,l.jsx)(d(),{className:c()("w-11 h-11 rounded-full mr-4 object-contain bg-white",{"border border-gray-200":a}),width:48,height:48,src:t,alt:i}):t:null,[t]),f=(0,s.useMemo)(()=>m&&m.length?(0,l.jsx)("div",{className:"flex items-center mt-1 flex-wrap",children:m.map((e,t)=>{var a;return"string"==typeof e?(0,l.jsx)(n.Z,{className:"text-xs",bordered:!1,color:"default",children:e},t):(0,l.jsx)(n.Z,{className:"text-xs",bordered:null!==(a=e.border)&&void 0!==a&&a,color:e.color,children:e.text},t)})}):null,[m]);return(0,l.jsxs)("div",{className:c()("group/card relative flex flex-col w-72 rounded justify-between text-black bg-white shadow-[0_8px_16px_-10px_rgba(100,100,100,.08)] hover:shadow-[0_14px_20px_-10px_rgba(100,100,100,.15)] dark:bg-[#232734] dark:text-white dark:hover:border-white transition-[transfrom_shadow] duration-300 hover:-translate-y-1 min-h-fit",{"grayscale cursor-no-drop":x,"cursor-pointer":!x&&!!_.onClick},p),..._,children:[(0,l.jsxs)("div",{className:"p-4",children:[(0,l.jsxs)("div",{className:"flex items-center",children:[j,(0,l.jsxs)("div",{className:"flex flex-col",children:[(0,l.jsx)("h2",{className:"text-sm font-semibold",children:i}),f]})]}),o&&(0,l.jsx)(r.Z,{title:o,children:(0,l.jsx)("p",{className:"mt-2 text-sm text-gray-500 font-normal line-clamp-2",children:o})})]}),(0,l.jsxs)("div",{children:[u,h&&!!h.length&&(0,l.jsx)("div",{className:"flex flex-wrap items-center justify-center border-t border-solid border-gray-100 
dark:border-theme-dark",children:h.map((e,t)=>(0,l.jsx)(r.Z,{title:e.label,children:(0,l.jsxs)("div",{className:"relative flex flex-1 items-center justify-center h-11 text-gray-400 hover:text-blue-500 transition-colors duration-300 cursor-pointer",onClick:t=>{var a;t.stopPropagation(),null===(a=e.onClick)||void 0===a||a.call(e)},children:[e.children,t{let[e,a]=await (0,C.Vx)((0,C.Tu)(t.name));d(a)};(0,n.useEffect)(()=>{x()},[t.name]);let h=[{key:"Embedding",label:(0,s.jsxs)("div",{children:[(0,s.jsx)(G.Z,{}),r("Embedding")]}),children:(0,s.jsxs)(F.Z,{gutter:24,children:[(0,s.jsx)(A.Z,{span:12,offset:0,children:(0,s.jsx)(U.Z.Item,{tooltip:r("the_top_k_vectors"),rules:[{required:!0}],label:r("topk"),name:["embedding","topk"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("Set_a_threshold_score"),rules:[{required:!0}],label:r("recall_score"),name:["embedding","recall_score"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:r("Please_input_the_owner")})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("recall_type"),rules:[{required:!0}],label:r("recall_type"),name:["embedding","recall_type"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("A_model_used"),rules:[{required:!0}],label:r("model"),name:["embedding","model"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("The_size_of_the_data_chunks"),rules:[{required:!0}],label:r("chunk_size"),name:["embedding","chunk_size"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("The_amount_of_overlap"),rules:[{required:!0}],label:r("chunk_overlap"),name:["embedding","chunk_overlap"],children:(0,s.jsx)(D.default,{className:"mb-5 
h-12",placeholder:r("Please_input_the_description")})})})]})},{key:"Prompt",label:(0,s.jsxs)("div",{children:[(0,s.jsx)(q.Z,{}),r("Prompt")]}),children:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{tooltip:r("A_contextual_parameter"),label:r("scene"),name:["prompt","scene"],children:(0,s.jsx)(M,{rows:4,className:"mb-2"})}),(0,s.jsx)(U.Z.Item,{tooltip:r("structure_or_format"),label:r("template"),name:["prompt","template"],children:(0,s.jsx)(M,{rows:7,className:"mb-2"})}),(0,s.jsx)(U.Z.Item,{tooltip:r("The_maximum_number_of_tokens"),label:r("max_token"),name:["prompt","max_token"],children:(0,s.jsx)(D.default,{className:"mb-2"})})]})},{key:"Summary",label:(0,s.jsxs)("div",{children:[(0,s.jsx)(z.Z,{}),r("Summary")]}),children:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{rules:[{required:!0}],label:r("max_iteration"),name:["summary","max_iteration"],children:(0,s.jsx)(D.default,{className:"mb-2"})}),(0,s.jsx)(U.Z.Item,{rules:[{required:!0}],label:r("concurrency_limit"),name:["summary","concurrency_limit"],children:(0,s.jsx)(D.default,{className:"mb-2"})})]})}],p=async e=>{u(!0);let[a,s,n]=await (0,C.Vx)((0,C.iH)(t.name,{argument:JSON.stringify(e)}));u(!1),(null==n?void 0:n.success)&&l(!1)};return(0,s.jsx)(c.default,{width:850,open:a,onCancel:()=>{l(!1)},footer:null,children:(0,s.jsx)(w.Z,{spinning:m,children:(0,s.jsxs)(U.Z,{size:"large",className:"mt-4",layout:"vertical",name:"basic",initialValues:{...o},autoComplete:"off",onFinish:p,children:[(0,s.jsx)(V.Z,{items:h}),(0,s.jsxs)("div",{className:"mt-3 mb-3",children:[(0,s.jsx)(i.ZP,{htmlType:"submit",type:"primary",className:"mr-6",children:r("Submit")}),(0,s.jsx)(i.ZP,{onClick:()=>{l(!1)},children:r("close")})]})]})})})}var L=a(47207);let{confirm:H}=c.default;function 
K(e){let{space:t}=e,{t:a}=(0,O.$G)(),l=(0,x.useRouter)(),[c,o]=(0,n.useState)(!1),[d,m]=(0,n.useState)([]),[u,p]=(0,n.useState)(!1),[_,j]=(0,n.useState)(0),I=(0,n.useRef)(1),D=(0,n.useMemo)(()=>d.length<_,[d.length,_]),F=e=>{H({title:a("Tips"),icon:(0,s.jsx)(h.Z,{}),content:"".concat(a("Del_Document_Tips"),"?"),okText:"Yes",okType:"danger",cancelText:"No",async onOk(){await G(e)}})};async function A(){o(!0);let[e,a]=await (0,C.Vx)((0,C._Q)(t.name,{page:I.current,page_size:18}));m(null==a?void 0:a.data),j((null==a?void 0:a.total)||0),o(!1)}let U=async()=>{if(!D)return;o(!0),I.current+=1;let[e,a]=await (0,C.Vx)((0,C._Q)(t.name,{page:I.current,page_size:18}));m([...d,...a.data]),o(!1)},V=async(e,t)=>{await (0,C.Vx)((0,C.Hx)(e,{doc_ids:[t]}))},G=async a=>{await (0,C.Vx)((0,C.n3)(t.name,{doc_name:a.doc_name})),A(),e.onDeleteDoc()},q=()=>{e.onAddDoc(t.name)},z=(e,t)=>{let a;switch(e){case"TODO":a="gold";break;case"RUNNING":a="#2db7f5";break;case"FINISHED":a="cyan";break;default:a="red"}return(0,s.jsx)(g.Z,{title:t,children:(0,s.jsx)(b.Z,{color:a,children:e})})};return(0,n.useEffect)(()=>{A()},[t]),(0,s.jsxs)("div",{className:"collapse-container pt-2 px-4",children:[(0,s.jsxs)(v.Z,{children:[(0,s.jsx)(i.ZP,{size:"middle",type:"primary",className:"flex items-center",icon:(0,s.jsx)(r.Z,{}),onClick:q,children:a("Add_Datasource")}),(0,s.jsx)(i.ZP,{size:"middle",className:"flex items-center mx-2",icon:(0,s.jsx)(P.Z,{}),onClick:()=>{p(!0)},children:"Arguments"}),"KnowledgeGraph"===t.vector_type&&(0,s.jsx)(i.ZP,{size:"middle",className:"flex items-center mx-2",icon:(0,s.jsx)(S.Z,{}),onClick:()=>{l.push("/knowledge/graph/?spaceName=".concat(t.name))},children:a("View_Graph")})]}),(0,s.jsx)(y.Z,{}),(0,s.jsx)(w.Z,{spinning:c,children:(null==d?void 0:d.length)>0?(0,s.jsxs)("div",{className:"max-h-96 overflow-auto max-w-3/4",children:[(0,s.jsx)("div",{className:"mt-3 grid grid-cols-1 gap-x-6 gap-y-5 sm:grid-cols-2 lg:grid-cols-3 
xl:gap-x-5",children:d.map(e=>(0,s.jsxs)(N.Z,{className:" dark:bg-[#484848] relative shrink-0 grow-0 cursor-pointer rounded-[10px] border border-gray-200 border-solid w-full",title:(0,s.jsx)(g.Z,{title:e.doc_name,children:(0,s.jsxs)("div",{className:"truncate ",children:[(0,s.jsx)(L.Z,{type:e.doc_type}),(0,s.jsx)("span",{children:e.doc_name})]})}),extra:(0,s.jsxs)("div",{className:"mx-3",children:[(0,s.jsx)(g.Z,{title:"detail",children:(0,s.jsx)(k.Z,{className:"mr-2 !text-lg",style:{color:"#1b7eff",fontSize:"20px"},onClick:()=>{l.push("/knowledge/chunk/?spaceName=".concat(t.name,"&id=").concat(e.id))}})}),(0,s.jsx)(g.Z,{title:"Sync",children:(0,s.jsx)(T.Z,{className:"mr-2 !text-lg",style:{color:"#1b7eff",fontSize:"20px"},onClick:()=>{V(t.name,e.id)}})}),(0,s.jsx)(g.Z,{title:"Delete",children:(0,s.jsx)(f.Z,{className:"text-[#ff1b2e] !text-lg",onClick:()=>{F(e)}})})]}),children:[(0,s.jsxs)("p",{className:"mt-2 font-semibold ",children:[a("Size"),":"]}),(0,s.jsxs)("p",{children:[e.chunk_size," chunks"]}),(0,s.jsxs)("p",{className:"mt-2 font-semibold ",children:[a("Last_Sync"),":"]}),(0,s.jsx)("p",{children:E()(e.last_sync).format("YYYY-MM-DD HH:MM:SS")}),(0,s.jsx)("p",{className:"mt-2 mb-2",children:z(e.status,e.result)})]},e.id))}),D&&(0,s.jsx)(y.Z,{children:(0,s.jsx)("span",{className:"cursor-pointer",onClick:U,children:a("Load_more")})})]}):(0,s.jsx)(Z.Z,{image:Z.Z.PRESENTED_IMAGE_DEFAULT,children:(0,s.jsx)(i.ZP,{type:"primary",className:"flex items-center mx-auto",icon:(0,s.jsx)(r.Z,{}),onClick:q,children:"Create Now"})})}),(0,s.jsx)(R,{space:t,argumentsShow:u,setArgumentsShow:p})]})}var Y=a(26892);let{confirm:X}=c.default;function $(e){let t=(0,x.useRouter)(),{t:a}=(0,O.$G)(),{space:l,getSpaces:n}=e,r=()=>{X({title:a("Tips"),icon:(0,s.jsx)(h.Z,{}),content:"".concat(a("Del_Knowledge_Tips"),"?"),okText:"Yes",okType:"danger",cancelText:"No",async onOk(){await (0,C.Vx)((0,C.XK)({name:null==l?void 0:l.name})),n()}})},i=async()=>{let[e,a]=await 
(0,C.Vx)((0,C.sW)({chat_mode:"chat_knowledge"}));(null==a?void 0:a.conv_uid)&&t.push("/chat?scene=chat_knowledge&id=".concat(null==a?void 0:a.conv_uid,"&db_param=").concat(l.name))};return(0,s.jsx)(d.ZP,{theme:{components:{Popover:{zIndexPopup:90}}},children:(0,s.jsx)(m.Z,{className:"cursor-pointer",placement:"bottom",trigger:"click",content:(0,s.jsx)(K,{space:l,onAddDoc:e.onAddDoc,onDeleteDoc:function(){n()}}),children:(0,s.jsx)(u.Z,{className:"mb-4 min-w-[200px] sm:w-60 lg:w-72",count:l.docs||0,children:(0,s.jsx)(Y.Z,{title:l.name,desc:l.desc,icon:"KnowledgeGraph"===l.vector_type?"/models/knowledge-graph.png":"FullText"===l.vector_type?"/models/knowledge-full-text.jpg":"/models/knowledge-default.jpg",iconBorder:!1,tags:[{text:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(p.Z,{className:"mr-1"}),null==l?void 0:l.owner]})},{text:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(_.Z,{className:"mr-1"}),E()(l.gmt_modified).format("YYYY-MM-DD")]})}],operations:[{label:a("Chat"),children:(0,s.jsx)(j.Z,{}),onClick:i},{label:a("Delete"),children:(0,s.jsx)(f.Z,{}),onClick:()=>{r()}}]})})})})}var W=a(84553),B=a(2453),Q=a(64082),J=a(94184),ee=a.n(J);let{Dragger:et}=W.default,{TextArea:ea}=D.default;function el(e){let{className:t,handleStepChange:a,spaceName:l,docType:r}=e,{t:c}=(0,O.$G)(),[o]=U.Z.useForm(),[d,m]=(0,n.useState)(!1),[u,x]=(0,n.useState)([]),h=async e=>{let t;let{docName:s,textSource:n,text:i,webPageUrl:c}=e;switch(m(!0),r){case"URL":[,t]=await (0,C.Vx)((0,C.H_)(l,{doc_name:s,content:c,doc_type:"URL"}));break;case"TEXT":[,t]=await (0,C.Vx)((0,C.H_)(l,{doc_name:s,source:n,content:i,doc_type:"TEXT"}))}return(m(!1),"DOCUMENT"===r&&u.length<1)?B.ZP.error("Upload failed, please re-upload."):"DOCUMENT"===r||t?void a({label:"forward",files:"DOCUMENT"===r?u:[{name:s,doc_id:t||-1}]}):B.ZP.error("Upload failed, please re-upload.")},p=e=>{let{file:t,fileList:a}=e;0===a.length&&o.setFieldValue("originFileObj",null)},_=async e=>{let{onSuccess:t,onError:a,file:s}=e,n=new 
FormData,r=null==s?void 0:s.name;n.append("doc_name",r),n.append("doc_file",s),n.append("doc_type","DOCUMENT");let[,i]=await (0,C.Vx)((0,C.iG)(l,n));Number.isInteger(i)?(t&&t(i||0),x(e=>(e.push({name:r,doc_id:i||-1}),e))):a&&a({name:"",message:""})},j=()=>(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{label:"".concat(c("Name"),":"),name:"docName",rules:[{required:!0,message:c("Please_input_the_name")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_name")})}),(0,s.jsx)(U.Z.Item,{label:"".concat(c("Text_Source"),":"),name:"textSource",rules:[{required:!0,message:c("Please_input_the_text_source")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_text_source")})}),(0,s.jsx)(U.Z.Item,{label:"".concat(c("Text"),":"),name:"text",rules:[{required:!0,message:c("Please_input_the_description")}],children:(0,s.jsx)(ea,{rows:4})})]}),f=()=>(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{label:"".concat(c("Name"),":"),name:"docName",rules:[{required:!0,message:c("Please_input_the_name")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_name")})}),(0,s.jsx)(U.Z.Item,{label:"".concat(c("Web_Page_URL"),":"),name:"webPageUrl",rules:[{required:!0,message:c("Please_input_the_Web_Page_URL")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_Web_Page_URL")})})]}),g=()=>(0,s.jsx)(s.Fragment,{children:(0,s.jsx)(U.Z.Item,{name:"originFileObj",rules:[{required:!0,message:c("Please_select_file")}],children:(0,s.jsxs)(et,{multiple:!0,onChange:p,maxCount:10,accept:".pdf,.ppt,.pptx,.xls,.xlsx,.doc,.docx,.txt,.md",customRequest:_,children:[(0,s.jsx)("p",{className:"ant-upload-drag-icon",children:(0,s.jsx)(Q.Z,{})}),(0,s.jsx)("p",{style:{color:"rgb(22, 108, 255)",fontSize:"20px"},children:c("Select_or_Drop_file")}),(0,s.jsx)("p",{className:"ant-upload-hint",style:{color:"rgb(22, 108, 255)"},children:"PDF, PowerPoint, Excel, Word, Text, 
Markdown,"})]})})});return(0,s.jsx)(w.Z,{spinning:d,children:(0,s.jsxs)(U.Z,{form:o,size:"large",className:ee()("mt-4",t),layout:"vertical",name:"basic",initialValues:{remember:!0},autoComplete:"off",onFinish:h,children:[(()=>{switch(r){case"URL":return f();case"DOCUMENT":return g();default:return j()}})(),(0,s.jsxs)(U.Z.Item,{children:[(0,s.jsx)(i.ZP,{onClick:()=>{a({label:"back"})},className:"mr-4",children:"".concat(c("Back"))}),(0,s.jsx)(i.ZP,{type:"primary",loading:d,htmlType:"submit",children:c("Next")})]})]})})}var es=a(51009);function en(e){let{t}=(0,O.$G)(),{handleStepChange:a}=e,[l,r]=(0,n.useState)(!1),c=async e=>{let{spaceName:t,owner:l,description:s,storage:n}=e;r(!0);let[i,c,o]=await (0,C.Vx)((0,C.be)({name:t,vector_type:n,owner:l,desc:s}));r(!1),(null==o?void 0:o.success)&&a({label:"forward",spaceName:t})};return(0,s.jsx)(w.Z,{spinning:l,children:(0,s.jsxs)(U.Z,{size:"large",className:"mt-4",layout:"vertical",name:"basic",initialValues:{remember:!0},autoComplete:"off",onFinish:c,children:[(0,s.jsx)(U.Z.Item,{label:t("Knowledge_Space_Name"),name:"spaceName",rules:[{required:!0,message:t("Please_input_the_name")},()=>({validator:(e,a)=>/[^\u4e00-\u9fa50-9a-zA-Z_-]/.test(a)?Promise.reject(Error(t("the_name_can_only_contain"))):Promise.resolve()})],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:t("Please_input_the_name")})}),(0,s.jsx)(U.Z.Item,{label:t("Owner"),name:"owner",rules:[{required:!0,message:t("Please_input_the_owner")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:t("Please_input_the_owner")})}),(0,s.jsx)(U.Z.Item,{label:t("Storage"),name:"storage",rules:[{required:!0,message:t("Please_select_the_storage")}],children:(0,s.jsxs)(es.default,{className:"mb-5 h-12",placeholder:t("Please_select_the_storage"),children:[(0,s.jsx)(es.default.Option,{value:"VectorStore",children:"Vector Store"}),(0,s.jsx)(es.default.Option,{value:"KnowledgeGraph",children:"Knowledge 
Graph"}),(0,s.jsx)(es.default.Option,{value:"FullText",children:"Full Text"})]})}),(0,s.jsx)(U.Z.Item,{label:t("Description"),name:"description",rules:[{required:!0,message:t("Please_input_the_description")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:t("Please_input_the_description")})}),(0,s.jsx)(U.Z.Item,{children:(0,s.jsx)(i.ZP,{type:"primary",htmlType:"submit",children:t("Next")})})]})})}function er(e){let{t}=(0,O.$G)(),{handleStepChange:a}=e,l=[{type:"TEXT",title:t("Text"),subTitle:t("Fill your raw text"),iconType:"TEXT"},{type:"URL",title:t("URL"),subTitle:t("Fetch_the_content_of_a_URL"),iconType:"WEBPAGE"},{type:"DOCUMENT",title:t("Document"),subTitle:t("Upload_a_document"),iconType:"DOCUMENT"}];return(0,s.jsx)(s.Fragment,{children:l.map((e,t)=>(0,s.jsxs)(N.Z,{className:"mt-4 mb-4 cursor-pointer",onClick:()=>{a({label:"forward",docType:e.type})},children:[(0,s.jsxs)("div",{className:"font-semibold",children:[(0,s.jsx)(L.Z,{type:e.iconType}),e.title]}),(0,s.jsx)("div",{children:e.subTitle})]},t))})}var ei=a(38925),ec=a(47221),eo=a(16165),ed=a(48928),em=a(84567),eu=a(78045);let{TextArea:ex}=D.default;function eh(e){let{strategies:t,docType:a,fileName:l,field:r}=e,[i,c]=(0,n.useState)(),o="";if("DOCUMENT"===a){let e=l.split(".");o=e[e.length-1]}let d=o?t.filter(e=>e.suffix.indexOf(o)>-1):t,{t:m}=(0,O.$G)(),u={strategy:"Automatic",name:m("Automatic"),desc:m("Automatic_desc")};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{name:[r.name,"chunk_parameters","chunk_strategy"],initialValue:u.strategy,children:(0,s.jsxs)(eu.ZP.Group,{style:{marginTop:16},onChange:function(e){c(e.target.value)},children:[(0,s.jsx)(eu.ZP,{value:u.strategy,children:u.name}),d.map(e=>(0,s.jsx)(eu.ZP,{value:e.strategy,children:e.name},"strategy_radio_".concat(e.strategy)))]})}),function(){if(!i)return null;if(i===u.strategy)return(0,s.jsx)("p",{className:"my-4",children:u.desc});let e=null==d?void 0:d.filter(e=>e.strategy===i)[0].parameters;return 
e&&e.length?(0,s.jsx)("div",{className:"mt-2",children:null==e?void 0:e.map(e=>(0,s.jsx)(U.Z.Item,{label:e.param_name,name:[r.name,"chunk_parameters",e.param_name],rules:[{required:!0,message:m("Please_input_the_name")}],initialValue:e.default_value,valuePropName:"boolean"===e.param_type?"checked":"value",tooltip:e.description,children:function(e){switch(e){case"int":return(0,s.jsx)(ed.Z,{className:"w-full",min:1});case"string":return(0,s.jsx)(ex,{className:"w-full",rows:2});case"boolean":return(0,s.jsx)(em.Z,{})}}(e.param_type)},"param_".concat(e.param_name)))}):(0,s.jsx)(ei.Z,{className:"my-2",type:"warning",message:m("No_parameter")})}()]})}var ep=a(82353);function e_(e){let{spaceName:t,docType:a,uploadFiles:r,handleStepChange:c}=e,{t:o}=(0,O.$G)(),[d]=U.Z.useForm(),[m,u]=(0,n.useState)(r),[x,h]=(0,n.useState)(),[p,_]=(0,n.useState)([]),[j,f]=(0,n.useState)("");async function g(){var e;h(!0);let[,t]=await (0,C.Vx)((0,C.iZ)());h(!1),_(null===(e=t||[])||void 0===e?void 0:e.filter(e=>e.type.indexOf(a)>-1))}(0,n.useEffect)(()=>(g(),()=>{l&&clearInterval(l)}),[]);let b=async e=>{if(function(e){let t=!0;"RUNNING"===j&&(t=!1,B.ZP.warning("The task is still running, do not submit it again."));let{fileStrategies:a}=e;return a.map(e=>{var t,a;let l=null==e?void 0:null===(t=e.chunk_parameters)||void 0===t?void 0:t.chunk_strategy;l||(e.chunk_parameters={chunk_strategy:"Automatic"});let s=p.filter(e=>e.strategy===l)[0],n={chunk_strategy:null==e?void 0:null===(a=e.chunk_parameters)||void 0===a?void 0:a.chunk_strategy};s&&s.parameters&&s.parameters.forEach(t=>{let a=t.param_name;n[a]=(null==e?void 0:e.chunk_parameters)[a]}),e.chunk_parameters=n}),t}(e)){var a;h(!0);let[,s]=await (0,C.Vx)((0,C.KL)(t,e.fileStrategies));if(h(!1),(null==s?void 0:s.tasks)&&(null==s?void 0:null===(a=s.tasks)||void 0===a?void 0:a.length)>0){B.ZP.success("Segemation task start successfully. 
task id: ".concat(null==s?void 0:s.tasks.join(","))),f("RUNNING");let t=e.fileStrategies.map(e=>e.doc_id);l=setInterval(async()=>{let e=await N(t);"FINISHED"===e&&(clearInterval(l),f("FINISHED"),B.ZP.success("Congratulation, All files sync successfully."),c({label:"finish"}))},3e3)}}};async function N(e){let[,a]=await (0,C.Vx)((0,C._Q)(t,{doc_ids:e}));if((null==a?void 0:a.data)&&(null==a?void 0:a.data.length)>0){let e=[...m];if(null==a||a.data.map(t=>{var a;let l=null===(a=null==e?void 0:e.filter(e=>e.doc_id===t.id))||void 0===a?void 0:a[0];l&&(l.status=t.status)}),u(e),null==a?void 0:a.data.every(e=>"FINISHED"===e.status||"FAILED"===e.status))return"FINISHED"}}return(0,s.jsx)(w.Z,{spinning:x,children:(0,s.jsxs)(U.Z,{labelCol:{span:6},wrapperCol:{span:18},labelAlign:"right",form:d,size:"large",className:"mt-4",layout:"horizontal",name:"basic",autoComplete:"off",initialValues:{fileStrategies:m},onFinish:b,children:[p&&p.length?(0,s.jsx)(U.Z.List,{name:"fileStrategies",children:e=>{switch(a){case"TEXT":case"URL":return null==e?void 0:e.map(e=>(0,s.jsx)(eh,{strategies:p,docType:a,fileName:m[e.name].name,field:e}));case"DOCUMENT":return(0,s.jsx)(ec.Z,{defaultActiveKey:0,size:m.length>5?"small":"middle",children:null==e?void 0:e.map(e=>(0,s.jsx)(ec.Z.Panel,{header:"".concat(e.name+1,". 
").concat(m[e.name].name),extra:function(e){let t=m[e].status;switch(t){case"FINISHED":return(0,s.jsx)(eo.Z,{component:ep.qw});case"RUNNING":return(0,s.jsx)(eo.Z,{className:"animate-spin animate-infinite",component:ep.bn});case"FAILED":return(0,s.jsx)(eo.Z,{component:ep.FE});default:return(0,s.jsx)(eo.Z,{component:ep.tu})}}(e.name),children:(0,s.jsx)(eh,{strategies:p,docType:a,fileName:m[e.name].name,field:e})},e.key))})}}}):(0,s.jsx)(ei.Z,{message:"Cannot find one strategy for ".concat(a," type knowledge."),type:"warning"}),(0,s.jsxs)(U.Z.Item,{className:"mt-4",children:[(0,s.jsx)(i.ZP,{onClick:()=>{c({label:"back"})},className:"mr-4",children:"".concat(o("Back"))}),(0,s.jsx)(i.ZP,{type:"primary",htmlType:"submit",loading:x||"RUNNING"===j,children:o("Process")})]})]})})}var ej=()=>{let[e,t]=(0,n.useState)([]),[a,l]=(0,n.useState)(!1),[d,m]=(0,n.useState)(0),[u,x]=(0,n.useState)(""),[h,p]=(0,n.useState)([]),[_,j]=(0,n.useState)(""),{t:f}=(0,O.$G)(),g=[{title:f("Knowledge_Space_Config")},{title:f("Choose_a_Datasource_type")},{title:f("Upload")},{title:f("Segmentation")}];async function b(){let[e,a]=await (0,C.Vx)((0,C.Vm)());t(a)}(0,n.useEffect)(()=>{b()},[]);let N=e=>{let{label:t,spaceName:a,docType:s,files:n}=e;"finish"===t?(l(!1),b(),x(""),j(""),b()):"forward"===t?(0===d&&b(),m(e=>e+1)):m(e=>e-1),n&&p(n),a&&x(a),s&&j(s)};function y(e){x(e),m(1),l(!0)}return(0,s.jsxs)("div",{className:"bg-[#FAFAFA] dark:bg-transparent w-full h-full",children:[(0,s.jsxs)("div",{className:"page-body p-4 md:p-6 h-full overflow-auto",children:[(0,s.jsx)(i.ZP,{type:"primary",className:"flex items-center",icon:(0,s.jsx)(r.Z,{}),onClick:()=>{l(!0)},children:"Create"}),(0,s.jsx)("div",{className:"flex flex-wrap mt-4 gap-2 md:gap-4",children:null==e?void 0:e.map(e=>(0,s.jsx)($,{space:e,onAddDoc:y,getSpaces:b},e.id))})]}),(0,s.jsxs)(c.default,{title:"Add 
Knowledge",centered:!0,open:a,destroyOnClose:!0,onCancel:()=>{l(!1)},width:1e3,afterClose:()=>{m(0),b()},footer:null,children:[(0,s.jsx)(o.Z,{current:d,items:g}),0===d&&(0,s.jsx)(en,{handleStepChange:N}),1===d&&(0,s.jsx)(er,{handleStepChange:N}),(0,s.jsx)(el,{className:ee()({hidden:2!==d}),spaceName:u,docType:_,handleStepChange:N}),3===d&&(0,s.jsx)(e_,{spaceName:u,docType:_,uploadFiles:h,handleStepChange:N})]})]})}}},function(e){e.O(0,[4885,2185,5503,1009,9479,785,4553,5813,411,8928,5733,2282,5237,9774,2888,179],function(){return e(e.s=54681)}),_N_E=e.O()}]);
\ No newline at end of file
diff --git a/dbgpt/app/static/_next/static/chunks/pages/knowledge-244aee7ebbad3668.js b/dbgpt/app/static/_next/static/chunks/pages/knowledge-244aee7ebbad3668.js
deleted file mode 100644
index 84ad3d362..000000000
--- a/dbgpt/app/static/_next/static/chunks/pages/knowledge-244aee7ebbad3668.js
+++ /dev/null
@@ -1 +0,0 @@
-(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[8662],{54681:function(e,t,a){(window.__NEXT_P=window.__NEXT_P||[]).push(["/knowledge",function(){return a(18671)}])},26892:function(e,t,a){"use strict";var l=a(85893),s=a(67294),n=a(66309),r=a(83062),i=a(94184),c=a.n(i),o=a(25675),d=a.n(o);t.Z=(0,s.memo)(function(e){let{icon:t,iconBorder:a=!0,title:i,desc:o,tags:m,children:u,disabled:x,operations:h,className:p,..._}=e,j=(0,s.useMemo)(()=>t?"string"==typeof t?(0,l.jsx)(d(),{className:c()("w-11 h-11 rounded-full mr-4 object-contain bg-white",{"border border-gray-200":a}),width:48,height:48,src:t,alt:i}):t:null,[t]),f=(0,s.useMemo)(()=>m&&m.length?(0,l.jsx)("div",{className:"flex items-center mt-1 flex-wrap",children:m.map((e,t)=>{var a;return"string"==typeof e?(0,l.jsx)(n.Z,{className:"text-xs",bordered:!1,color:"default",children:e},t):(0,l.jsx)(n.Z,{className:"text-xs",bordered:null!==(a=e.border)&&void 0!==a&&a,color:e.color,children:e.text},t)})}):null,[m]);return(0,l.jsxs)("div",{className:c()("group/card relative flex flex-col w-72 rounded justify-between text-black bg-white shadow-[0_8px_16px_-10px_rgba(100,100,100,.08)] hover:shadow-[0_14px_20px_-10px_rgba(100,100,100,.15)] dark:bg-[#232734] dark:text-white dark:hover:border-white transition-[transfrom_shadow] duration-300 hover:-translate-y-1 min-h-fit",{"grayscale cursor-no-drop":x,"cursor-pointer":!x&&!!_.onClick},p),..._,children:[(0,l.jsxs)("div",{className:"p-4",children:[(0,l.jsxs)("div",{className:"flex items-center",children:[j,(0,l.jsxs)("div",{className:"flex flex-col",children:[(0,l.jsx)("h2",{className:"text-sm font-semibold",children:i}),f]})]}),o&&(0,l.jsx)(r.Z,{title:o,children:(0,l.jsx)("p",{className:"mt-2 text-sm text-gray-500 font-normal line-clamp-2",children:o})})]}),(0,l.jsxs)("div",{children:[u,h&&!!h.length&&(0,l.jsx)("div",{className:"flex flex-wrap items-center justify-center border-t border-solid border-gray-100 
dark:border-theme-dark",children:h.map((e,t)=>(0,l.jsx)(r.Z,{title:e.label,children:(0,l.jsxs)("div",{className:"relative flex flex-1 items-center justify-center h-11 text-gray-400 hover:text-blue-500 transition-colors duration-300 cursor-pointer",onClick:t=>{var a;t.stopPropagation(),null===(a=e.onClick)||void 0===a||a.call(e)},children:[e.children,t{let[e,a]=await (0,C.Vx)((0,C.Tu)(t.name));d(a)};(0,n.useEffect)(()=>{x()},[t.name]);let h=[{key:"Embedding",label:(0,s.jsxs)("div",{children:[(0,s.jsx)(G.Z,{}),r("Embedding")]}),children:(0,s.jsxs)(F.Z,{gutter:24,children:[(0,s.jsx)(A.Z,{span:12,offset:0,children:(0,s.jsx)(U.Z.Item,{tooltip:r("the_top_k_vectors"),rules:[{required:!0}],label:r("topk"),name:["embedding","topk"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("Set_a_threshold_score"),rules:[{required:!0}],label:r("recall_score"),name:["embedding","recall_score"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:r("Please_input_the_owner")})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("recall_type"),rules:[{required:!0}],label:r("recall_type"),name:["embedding","recall_type"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("A_model_used"),rules:[{required:!0}],label:r("model"),name:["embedding","model"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("The_size_of_the_data_chunks"),rules:[{required:!0}],label:r("chunk_size"),name:["embedding","chunk_size"],children:(0,s.jsx)(D.default,{className:"mb-5 h-12"})})}),(0,s.jsx)(A.Z,{span:12,children:(0,s.jsx)(U.Z.Item,{tooltip:r("The_amount_of_overlap"),rules:[{required:!0}],label:r("chunk_overlap"),name:["embedding","chunk_overlap"],children:(0,s.jsx)(D.default,{className:"mb-5 
h-12",placeholder:r("Please_input_the_description")})})})]})},{key:"Prompt",label:(0,s.jsxs)("div",{children:[(0,s.jsx)(q.Z,{}),r("Prompt")]}),children:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{tooltip:r("A_contextual_parameter"),label:r("scene"),name:["prompt","scene"],children:(0,s.jsx)(M,{rows:4,className:"mb-2"})}),(0,s.jsx)(U.Z.Item,{tooltip:r("structure_or_format"),label:r("template"),name:["prompt","template"],children:(0,s.jsx)(M,{rows:7,className:"mb-2"})}),(0,s.jsx)(U.Z.Item,{tooltip:r("The_maximum_number_of_tokens"),label:r("max_token"),name:["prompt","max_token"],children:(0,s.jsx)(D.default,{className:"mb-2"})})]})},{key:"Summary",label:(0,s.jsxs)("div",{children:[(0,s.jsx)(z.Z,{}),r("Summary")]}),children:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{rules:[{required:!0}],label:r("max_iteration"),name:["summary","max_iteration"],children:(0,s.jsx)(D.default,{className:"mb-2"})}),(0,s.jsx)(U.Z.Item,{rules:[{required:!0}],label:r("concurrency_limit"),name:["summary","concurrency_limit"],children:(0,s.jsx)(D.default,{className:"mb-2"})})]})}],p=async e=>{u(!0);let[a,s,n]=await (0,C.Vx)((0,C.iH)(t.name,{argument:JSON.stringify(e)}));u(!1),(null==n?void 0:n.success)&&l(!1)};return(0,s.jsx)(c.default,{width:850,open:a,onCancel:()=>{l(!1)},footer:null,children:(0,s.jsx)(w.Z,{spinning:m,children:(0,s.jsxs)(U.Z,{size:"large",className:"mt-4",layout:"vertical",name:"basic",initialValues:{...o},autoComplete:"off",onFinish:p,children:[(0,s.jsx)(V.Z,{items:h}),(0,s.jsxs)("div",{className:"mt-3 mb-3",children:[(0,s.jsx)(i.ZP,{htmlType:"submit",type:"primary",className:"mr-6",children:r("Submit")}),(0,s.jsx)(i.ZP,{onClick:()=>{l(!1)},children:r("close")})]})]})})})}var L=a(47207);let{confirm:H}=c.default;function 
K(e){let{space:t}=e,{t:a}=(0,O.$G)(),l=(0,x.useRouter)(),[c,o]=(0,n.useState)(!1),[d,m]=(0,n.useState)([]),[u,p]=(0,n.useState)(!1),[_,j]=(0,n.useState)(0),I=(0,n.useRef)(1),D=(0,n.useMemo)(()=>d.length<_,[d.length,_]),F=e=>{H({title:a("Tips"),icon:(0,s.jsx)(h.Z,{}),content:"".concat(a("Del_Document_Tips"),"?"),okText:"Yes",okType:"danger",cancelText:"No",async onOk(){await G(e)}})};async function A(){o(!0);let[e,a]=await (0,C.Vx)((0,C._Q)(t.name,{page:I.current,page_size:18}));m(null==a?void 0:a.data),j((null==a?void 0:a.total)||0),o(!1)}let U=async()=>{if(!D)return;o(!0),I.current+=1;let[e,a]=await (0,C.Vx)((0,C._Q)(t.name,{page:I.current,page_size:18}));m([...d,...a.data]),o(!1)},V=async(e,t)=>{await (0,C.Vx)((0,C.Hx)(e,{doc_ids:[t]}))},G=async a=>{await (0,C.Vx)((0,C.n3)(t.name,{doc_name:a.doc_name})),A(),e.onDeleteDoc()},q=()=>{e.onAddDoc(t.name)},z=(e,t)=>{let a;switch(e){case"TODO":a="gold";break;case"RUNNING":a="#2db7f5";break;case"FINISHED":a="cyan";break;default:a="red"}return(0,s.jsx)(g.Z,{title:t,children:(0,s.jsx)(b.Z,{color:a,children:e})})};return(0,n.useEffect)(()=>{A()},[t]),(0,s.jsxs)("div",{className:"collapse-container pt-2 px-4",children:[(0,s.jsxs)(v.Z,{children:[(0,s.jsx)(i.ZP,{size:"middle",type:"primary",className:"flex items-center",icon:(0,s.jsx)(r.Z,{}),onClick:q,children:a("Add_Datasource")}),(0,s.jsx)(i.ZP,{size:"middle",className:"flex items-center mx-2",icon:(0,s.jsx)(P.Z,{}),onClick:()=>{p(!0)},children:"Arguments"}),"KnowledgeGraph"===t.vector_type&&(0,s.jsx)(i.ZP,{size:"middle",className:"flex items-center mx-2",icon:(0,s.jsx)(S.Z,{}),onClick:()=>{l.push("/knowledge/graph/?spaceName=".concat(t.name))},children:a("View_Graph")})]}),(0,s.jsx)(y.Z,{}),(0,s.jsx)(w.Z,{spinning:c,children:(null==d?void 0:d.length)>0?(0,s.jsxs)("div",{className:"max-h-96 overflow-auto max-w-3/4",children:[(0,s.jsx)("div",{className:"mt-3 grid grid-cols-1 gap-x-6 gap-y-5 sm:grid-cols-2 lg:grid-cols-3 
xl:gap-x-5",children:d.map(e=>(0,s.jsxs)(N.Z,{className:" dark:bg-[#484848] relative shrink-0 grow-0 cursor-pointer rounded-[10px] border border-gray-200 border-solid w-full",title:(0,s.jsx)(g.Z,{title:e.doc_name,children:(0,s.jsxs)("div",{className:"truncate ",children:[(0,s.jsx)(L.Z,{type:e.doc_type}),(0,s.jsx)("span",{children:e.doc_name})]})}),extra:(0,s.jsxs)("div",{className:"mx-3",children:[(0,s.jsx)(g.Z,{title:"detail",children:(0,s.jsx)(k.Z,{className:"mr-2 !text-lg",style:{color:"#1b7eff",fontSize:"20px"},onClick:()=>{l.push("/knowledge/chunk/?spaceName=".concat(t.name,"&id=").concat(e.id))}})}),(0,s.jsx)(g.Z,{title:"Sync",children:(0,s.jsx)(T.Z,{className:"mr-2 !text-lg",style:{color:"#1b7eff",fontSize:"20px"},onClick:()=>{V(t.name,e.id)}})}),(0,s.jsx)(g.Z,{title:"Delete",children:(0,s.jsx)(f.Z,{className:"text-[#ff1b2e] !text-lg",onClick:()=>{F(e)}})})]}),children:[(0,s.jsxs)("p",{className:"mt-2 font-semibold ",children:[a("Size"),":"]}),(0,s.jsxs)("p",{children:[e.chunk_size," chunks"]}),(0,s.jsxs)("p",{className:"mt-2 font-semibold ",children:[a("Last_Sync"),":"]}),(0,s.jsx)("p",{children:E()(e.last_sync).format("YYYY-MM-DD HH:MM:SS")}),(0,s.jsx)("p",{className:"mt-2 mb-2",children:z(e.status,e.result)})]},e.id))}),D&&(0,s.jsx)(y.Z,{children:(0,s.jsx)("span",{className:"cursor-pointer",onClick:U,children:a("Load_more")})})]}):(0,s.jsx)(Z.Z,{image:Z.Z.PRESENTED_IMAGE_DEFAULT,children:(0,s.jsx)(i.ZP,{type:"primary",className:"flex items-center mx-auto",icon:(0,s.jsx)(r.Z,{}),onClick:q,children:"Create Now"})})}),(0,s.jsx)(R,{space:t,argumentsShow:u,setArgumentsShow:p})]})}var Y=a(26892);let{confirm:X}=c.default;function $(e){let t=(0,x.useRouter)(),{t:a}=(0,O.$G)(),{space:l,getSpaces:n}=e,r=()=>{X({title:a("Tips"),icon:(0,s.jsx)(h.Z,{}),content:"".concat(a("Del_Knowledge_Tips"),"?"),okText:"Yes",okType:"danger",cancelText:"No",async onOk(){await (0,C.Vx)((0,C.XK)({name:null==l?void 0:l.name})),n()}})},i=async()=>{let[e,a]=await 
(0,C.Vx)((0,C.sW)({chat_mode:"chat_knowledge"}));(null==a?void 0:a.conv_uid)&&t.push("/chat?scene=chat_knowledge&id=".concat(null==a?void 0:a.conv_uid,"&db_param=").concat(l.name))};return(0,s.jsx)(d.ZP,{theme:{components:{Popover:{zIndexPopup:90}}},children:(0,s.jsx)(m.Z,{className:"cursor-pointer",placement:"bottom",trigger:"click",content:(0,s.jsx)(K,{space:l,onAddDoc:e.onAddDoc,onDeleteDoc:function(){n()}}),children:(0,s.jsx)(u.Z,{className:"mb-4 min-w-[200px] sm:w-60 lg:w-72",count:l.docs||0,children:(0,s.jsx)(Y.Z,{title:l.name,desc:l.desc,icon:"KnowledgeGraph"===l.vector_type?"/models/knowledge-graph.png":"/models/knowledge-default.jpg",iconBorder:!1,tags:[{text:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(p.Z,{className:"mr-1"}),null==l?void 0:l.owner]})},{text:(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(_.Z,{className:"mr-1"}),E()(l.gmt_modified).format("YYYY-MM-DD")]})}],operations:[{label:a("Chat"),children:(0,s.jsx)(j.Z,{}),onClick:i},{label:a("Delete"),children:(0,s.jsx)(f.Z,{}),onClick:()=>{r()}}]})})})})}var W=a(84553),B=a(2453),Q=a(64082),J=a(94184),ee=a.n(J);let{Dragger:et}=W.default,{TextArea:ea}=D.default;function el(e){let{className:t,handleStepChange:a,spaceName:l,docType:r}=e,{t:c}=(0,O.$G)(),[o]=U.Z.useForm(),[d,m]=(0,n.useState)(!1),[u,x]=(0,n.useState)([]),h=async e=>{let t;let{docName:s,textSource:n,text:i,webPageUrl:c}=e;switch(m(!0),r){case"URL":[,t]=await (0,C.Vx)((0,C.H_)(l,{doc_name:s,content:c,doc_type:"URL"}));break;case"TEXT":[,t]=await (0,C.Vx)((0,C.H_)(l,{doc_name:s,source:n,content:i,doc_type:"TEXT"}))}return(m(!1),"DOCUMENT"===r&&u.length<1)?B.ZP.error("Upload failed, please re-upload."):"DOCUMENT"===r||t?void a({label:"forward",files:"DOCUMENT"===r?u:[{name:s,doc_id:t||-1}]}):B.ZP.error("Upload failed, please re-upload.")},p=e=>{let{file:t,fileList:a}=e;0===a.length&&o.setFieldValue("originFileObj",null)},_=async e=>{let{onSuccess:t,onError:a,file:s}=e,n=new FormData,r=null==s?void 
0:s.name;n.append("doc_name",r),n.append("doc_file",s),n.append("doc_type","DOCUMENT");let[,i]=await (0,C.Vx)((0,C.iG)(l,n));Number.isInteger(i)?(t&&t(i||0),x(e=>(e.push({name:r,doc_id:i||-1}),e))):a&&a({name:"",message:""})},j=()=>(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{label:"".concat(c("Name"),":"),name:"docName",rules:[{required:!0,message:c("Please_input_the_name")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_name")})}),(0,s.jsx)(U.Z.Item,{label:"".concat(c("Text_Source"),":"),name:"textSource",rules:[{required:!0,message:c("Please_input_the_text_source")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_text_source")})}),(0,s.jsx)(U.Z.Item,{label:"".concat(c("Text"),":"),name:"text",rules:[{required:!0,message:c("Please_input_the_description")}],children:(0,s.jsx)(ea,{rows:4})})]}),f=()=>(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{label:"".concat(c("Name"),":"),name:"docName",rules:[{required:!0,message:c("Please_input_the_name")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_name")})}),(0,s.jsx)(U.Z.Item,{label:"".concat(c("Web_Page_URL"),":"),name:"webPageUrl",rules:[{required:!0,message:c("Please_input_the_Web_Page_URL")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:c("Please_input_the_Web_Page_URL")})})]}),g=()=>(0,s.jsx)(s.Fragment,{children:(0,s.jsx)(U.Z.Item,{name:"originFileObj",rules:[{required:!0,message:c("Please_select_file")}],children:(0,s.jsxs)(et,{multiple:!0,onChange:p,maxCount:10,accept:".pdf,.ppt,.pptx,.xls,.xlsx,.doc,.docx,.txt,.md",customRequest:_,children:[(0,s.jsx)("p",{className:"ant-upload-drag-icon",children:(0,s.jsx)(Q.Z,{})}),(0,s.jsx)("p",{style:{color:"rgb(22, 108, 255)",fontSize:"20px"},children:c("Select_or_Drop_file")}),(0,s.jsx)("p",{className:"ant-upload-hint",style:{color:"rgb(22, 108, 255)"},children:"PDF, PowerPoint, Excel, Word, Text, 
Markdown,"})]})})});return(0,s.jsx)(w.Z,{spinning:d,children:(0,s.jsxs)(U.Z,{form:o,size:"large",className:ee()("mt-4",t),layout:"vertical",name:"basic",initialValues:{remember:!0},autoComplete:"off",onFinish:h,children:[(()=>{switch(r){case"URL":return f();case"DOCUMENT":return g();default:return j()}})(),(0,s.jsxs)(U.Z.Item,{children:[(0,s.jsx)(i.ZP,{onClick:()=>{a({label:"back"})},className:"mr-4",children:"".concat(c("Back"))}),(0,s.jsx)(i.ZP,{type:"primary",loading:d,htmlType:"submit",children:c("Next")})]})]})})}var es=a(51009);function en(e){let{t}=(0,O.$G)(),{handleStepChange:a}=e,[l,r]=(0,n.useState)(!1),c=async e=>{let{spaceName:t,owner:l,description:s,storage:n}=e;r(!0);let[i,c,o]=await (0,C.Vx)((0,C.be)({name:t,vector_type:n,owner:l,desc:s}));r(!1),(null==o?void 0:o.success)&&a({label:"forward",spaceName:t})};return(0,s.jsx)(w.Z,{spinning:l,children:(0,s.jsxs)(U.Z,{size:"large",className:"mt-4",layout:"vertical",name:"basic",initialValues:{remember:!0},autoComplete:"off",onFinish:c,children:[(0,s.jsx)(U.Z.Item,{label:t("Knowledge_Space_Name"),name:"spaceName",rules:[{required:!0,message:t("Please_input_the_name")},()=>({validator:(e,a)=>/[^\u4e00-\u9fa50-9a-zA-Z_-]/.test(a)?Promise.reject(Error(t("the_name_can_only_contain"))):Promise.resolve()})],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:t("Please_input_the_name")})}),(0,s.jsx)(U.Z.Item,{label:t("Owner"),name:"owner",rules:[{required:!0,message:t("Please_input_the_owner")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:t("Please_input_the_owner")})}),(0,s.jsx)(U.Z.Item,{label:t("Storage"),name:"storage",rules:[{required:!0,message:t("Please_select_the_storage")}],children:(0,s.jsxs)(es.default,{className:"mb-5 h-12",placeholder:t("Please_select_the_storage"),children:[(0,s.jsx)(es.default.Option,{value:"VectorStore",children:"Vector Store"}),(0,s.jsx)(es.default.Option,{value:"KnowledgeGraph",children:"Knowledge 
Graph"})]})}),(0,s.jsx)(U.Z.Item,{label:t("Description"),name:"description",rules:[{required:!0,message:t("Please_input_the_description")}],children:(0,s.jsx)(D.default,{className:"mb-5 h-12",placeholder:t("Please_input_the_description")})}),(0,s.jsx)(U.Z.Item,{children:(0,s.jsx)(i.ZP,{type:"primary",htmlType:"submit",children:t("Next")})})]})})}function er(e){let{t}=(0,O.$G)(),{handleStepChange:a}=e,l=[{type:"TEXT",title:t("Text"),subTitle:t("Fill your raw text"),iconType:"TEXT"},{type:"URL",title:t("URL"),subTitle:t("Fetch_the_content_of_a_URL"),iconType:"WEBPAGE"},{type:"DOCUMENT",title:t("Document"),subTitle:t("Upload_a_document"),iconType:"DOCUMENT"}];return(0,s.jsx)(s.Fragment,{children:l.map((e,t)=>(0,s.jsxs)(N.Z,{className:"mt-4 mb-4 cursor-pointer",onClick:()=>{a({label:"forward",docType:e.type})},children:[(0,s.jsxs)("div",{className:"font-semibold",children:[(0,s.jsx)(L.Z,{type:e.iconType}),e.title]}),(0,s.jsx)("div",{children:e.subTitle})]},t))})}var ei=a(38925),ec=a(47221),eo=a(16165),ed=a(48928),em=a(84567),eu=a(78045);let{TextArea:ex}=D.default;function eh(e){let{strategies:t,docType:a,fileName:l,field:r}=e,[i,c]=(0,n.useState)(),o="";if("DOCUMENT"===a){let e=l.split(".");o=e[e.length-1]}let d=o?t.filter(e=>e.suffix.indexOf(o)>-1):t,{t:m}=(0,O.$G)(),u={strategy:"Automatic",name:m("Automatic"),desc:m("Automatic_desc")};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(U.Z.Item,{name:[r.name,"chunk_parameters","chunk_strategy"],initialValue:u.strategy,children:(0,s.jsxs)(eu.ZP.Group,{style:{marginTop:16},onChange:function(e){c(e.target.value)},children:[(0,s.jsx)(eu.ZP,{value:u.strategy,children:u.name}),d.map(e=>(0,s.jsx)(eu.ZP,{value:e.strategy,children:e.name},"strategy_radio_".concat(e.strategy)))]})}),function(){if(!i)return null;if(i===u.strategy)return(0,s.jsx)("p",{className:"my-4",children:u.desc});let e=null==d?void 0:d.filter(e=>e.strategy===i)[0].parameters;return e&&e.length?(0,s.jsx)("div",{className:"mt-2",children:null==e?void 
0:e.map(e=>(0,s.jsx)(U.Z.Item,{label:e.param_name,name:[r.name,"chunk_parameters",e.param_name],rules:[{required:!0,message:m("Please_input_the_name")}],initialValue:e.default_value,valuePropName:"boolean"===e.param_type?"checked":"value",tooltip:e.description,children:function(e){switch(e){case"int":return(0,s.jsx)(ed.Z,{className:"w-full",min:1});case"string":return(0,s.jsx)(ex,{className:"w-full",rows:2});case"boolean":return(0,s.jsx)(em.Z,{})}}(e.param_type)},"param_".concat(e.param_name)))}):(0,s.jsx)(ei.Z,{className:"my-2",type:"warning",message:m("No_parameter")})}()]})}var ep=a(82353);function e_(e){let{spaceName:t,docType:a,uploadFiles:r,handleStepChange:c}=e,{t:o}=(0,O.$G)(),[d]=U.Z.useForm(),[m,u]=(0,n.useState)(r),[x,h]=(0,n.useState)(),[p,_]=(0,n.useState)([]),[j,f]=(0,n.useState)("");async function g(){var e;h(!0);let[,t]=await (0,C.Vx)((0,C.iZ)());h(!1),_(null===(e=t||[])||void 0===e?void 0:e.filter(e=>e.type.indexOf(a)>-1))}(0,n.useEffect)(()=>(g(),()=>{l&&clearInterval(l)}),[]);let b=async e=>{if(function(e){let t=!0;"RUNNING"===j&&(t=!1,B.ZP.warning("The task is still running, do not submit it again."));let{fileStrategies:a}=e;return a.map(e=>{var t,a;let l=null==e?void 0:null===(t=e.chunk_parameters)||void 0===t?void 0:t.chunk_strategy;l||(e.chunk_parameters={chunk_strategy:"Automatic"});let s=p.filter(e=>e.strategy===l)[0],n={chunk_strategy:null==e?void 0:null===(a=e.chunk_parameters)||void 0===a?void 0:a.chunk_strategy};s&&s.parameters&&s.parameters.forEach(t=>{let a=t.param_name;n[a]=(null==e?void 0:e.chunk_parameters)[a]}),e.chunk_parameters=n}),t}(e)){var a;h(!0);let[,s]=await (0,C.Vx)((0,C.KL)(t,e.fileStrategies));if(h(!1),(null==s?void 0:s.tasks)&&(null==s?void 0:null===(a=s.tasks)||void 0===a?void 0:a.length)>0){B.ZP.success("Segemation task start successfully. 
task id: ".concat(null==s?void 0:s.tasks.join(","))),f("RUNNING");let t=e.fileStrategies.map(e=>e.doc_id);l=setInterval(async()=>{let e=await N(t);"FINISHED"===e&&(clearInterval(l),f("FINISHED"),B.ZP.success("Congratulation, All files sync successfully."),c({label:"finish"}))},3e3)}}};async function N(e){let[,a]=await (0,C.Vx)((0,C._Q)(t,{doc_ids:e}));if((null==a?void 0:a.data)&&(null==a?void 0:a.data.length)>0){let e=[...m];if(null==a||a.data.map(t=>{var a;let l=null===(a=null==e?void 0:e.filter(e=>e.doc_id===t.id))||void 0===a?void 0:a[0];l&&(l.status=t.status)}),u(e),null==a?void 0:a.data.every(e=>"FINISHED"===e.status||"FAILED"===e.status))return"FINISHED"}}return(0,s.jsx)(w.Z,{spinning:x,children:(0,s.jsxs)(U.Z,{labelCol:{span:6},wrapperCol:{span:18},labelAlign:"right",form:d,size:"large",className:"mt-4",layout:"horizontal",name:"basic",autoComplete:"off",initialValues:{fileStrategies:m},onFinish:b,children:[p&&p.length?(0,s.jsx)(U.Z.List,{name:"fileStrategies",children:e=>{switch(a){case"TEXT":case"URL":return null==e?void 0:e.map(e=>(0,s.jsx)(eh,{strategies:p,docType:a,fileName:m[e.name].name,field:e}));case"DOCUMENT":return(0,s.jsx)(ec.Z,{defaultActiveKey:0,size:m.length>5?"small":"middle",children:null==e?void 0:e.map(e=>(0,s.jsx)(ec.Z.Panel,{header:"".concat(e.name+1,". 
").concat(m[e.name].name),extra:function(e){let t=m[e].status;switch(t){case"FINISHED":return(0,s.jsx)(eo.Z,{component:ep.qw});case"RUNNING":return(0,s.jsx)(eo.Z,{className:"animate-spin animate-infinite",component:ep.bn});case"FAILED":return(0,s.jsx)(eo.Z,{component:ep.FE});default:return(0,s.jsx)(eo.Z,{component:ep.tu})}}(e.name),children:(0,s.jsx)(eh,{strategies:p,docType:a,fileName:m[e.name].name,field:e})},e.key))})}}}):(0,s.jsx)(ei.Z,{message:"Cannot find one strategy for ".concat(a," type knowledge."),type:"warning"}),(0,s.jsxs)(U.Z.Item,{className:"mt-4",children:[(0,s.jsx)(i.ZP,{onClick:()=>{c({label:"back"})},className:"mr-4",children:"".concat(o("Back"))}),(0,s.jsx)(i.ZP,{type:"primary",htmlType:"submit",loading:x||"RUNNING"===j,children:o("Process")})]})]})})}var ej=()=>{let[e,t]=(0,n.useState)([]),[a,l]=(0,n.useState)(!1),[d,m]=(0,n.useState)(0),[u,x]=(0,n.useState)(""),[h,p]=(0,n.useState)([]),[_,j]=(0,n.useState)(""),{t:f}=(0,O.$G)(),g=[{title:f("Knowledge_Space_Config")},{title:f("Choose_a_Datasource_type")},{title:f("Upload")},{title:f("Segmentation")}];async function b(){let[e,a]=await (0,C.Vx)((0,C.Vm)());t(a)}(0,n.useEffect)(()=>{b()},[]);let N=e=>{let{label:t,spaceName:a,docType:s,files:n}=e;"finish"===t?(l(!1),b(),x(""),j(""),b()):"forward"===t?(0===d&&b(),m(e=>e+1)):m(e=>e-1),n&&p(n),a&&x(a),s&&j(s)};function y(e){x(e),m(1),l(!0)}return(0,s.jsxs)("div",{className:"bg-[#FAFAFA] dark:bg-transparent w-full h-full",children:[(0,s.jsxs)("div",{className:"page-body p-4 md:p-6 h-full overflow-auto",children:[(0,s.jsx)(i.ZP,{type:"primary",className:"flex items-center",icon:(0,s.jsx)(r.Z,{}),onClick:()=>{l(!0)},children:"Create"}),(0,s.jsx)("div",{className:"flex flex-wrap mt-4 gap-2 md:gap-4",children:null==e?void 0:e.map(e=>(0,s.jsx)($,{space:e,onAddDoc:y,getSpaces:b},e.id))})]}),(0,s.jsxs)(c.default,{title:"Add 
Knowledge",centered:!0,open:a,destroyOnClose:!0,onCancel:()=>{l(!1)},width:1e3,afterClose:()=>{m(0),b()},footer:null,children:[(0,s.jsx)(o.Z,{current:d,items:g}),0===d&&(0,s.jsx)(en,{handleStepChange:N}),1===d&&(0,s.jsx)(er,{handleStepChange:N}),(0,s.jsx)(el,{className:ee()({hidden:2!==d}),spaceName:u,docType:_,handleStepChange:N}),3===d&&(0,s.jsx)(e_,{spaceName:u,docType:_,uploadFiles:h,handleStepChange:N})]})]})}}},function(e){e.O(0,[4885,2185,5503,1009,9479,785,4553,5813,411,8928,5733,2282,5237,9774,2888,179],function(){return e(e.s=54681)}),_N_E=e.O()}]);
\ No newline at end of file
diff --git a/dbgpt/app/static/_next/static/chunks/webpack-4e5527d8038129bd.js b/dbgpt/app/static/_next/static/chunks/webpack-392ef02c5aff6027.js
similarity index 98%
rename from dbgpt/app/static/_next/static/chunks/webpack-4e5527d8038129bd.js
rename to dbgpt/app/static/_next/static/chunks/webpack-392ef02c5aff6027.js
index 304e72e0a..28fa6e965 100644
--- a/dbgpt/app/static/_next/static/chunks/webpack-4e5527d8038129bd.js
+++ b/dbgpt/app/static/_next/static/chunks/webpack-392ef02c5aff6027.js
@@ -1 +1 @@
-!function(){"use strict";var e,t,c,a,n,f,r,d,o,b,u,i,s={},l={};function h(e){var t=l[e];if(void 0!==t)return t.exports;var c=l[e]={id:e,loaded:!1,exports:{}},a=!0;try{s[e].call(c.exports,c,c.exports,h),a=!1}finally{a&&delete l[e]}return c.loaded=!0,c.exports}h.m=s,h.amdO={},e=[],h.O=function(t,c,a,n){if(c){n=n||0;for(var f=e.length;f>0&&e[f-1][2]>n;f--)e[f]=e[f-1];e[f]=[c,a,n];return}for(var r=1/0,f=0;f=n&&Object.keys(h.O).every(function(e){return h.O[e](c[o])})?c.splice(o--,1):(d=!1,n0&&e[f-1][2]>n;f--)e[f]=e[f-1];e[f]=[c,a,n];return}for(var r=1/0,f=0;f=n&&Object.keys(h.O).every(function(e){return h.O[e](c[o])})?c.splice(o--,1):(d=!1,n
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/app/index.html b/dbgpt/app/static/app/index.html
index 7c5dbc200..19880b258 100644
--- a/dbgpt/app/static/app/index.html
+++ b/dbgpt/app/static/app/index.html
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/chat/index.html b/dbgpt/app/static/chat/index.html
index 1ccce8162..4d84a700f 100644
--- a/dbgpt/app/static/chat/index.html
+++ b/dbgpt/app/static/chat/index.html
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/database/index.html b/dbgpt/app/static/database/index.html
index eec2d935a..df61974ce 100644
--- a/dbgpt/app/static/database/index.html
+++ b/dbgpt/app/static/database/index.html
@@ -1 +1 @@
-
MySQL Fast, reliable, scalable open-source relational database management system.
MSSQL Powerful, scalable, secure relational database system by Microsoft.
DuckDB In-memory analytical database with efficient query processing.
Sqlite Lightweight embedded relational database with simplicity and portability.
ClickHouse Columnar database for high-performance analytics and real-time queries.
Oracle Robust, scalable, secure relational database widely used in enterprises.
Access Easy-to-use relational database for small-scale applications by Microsoft.
MongoDB Flexible, scalable NoSQL document database for web and mobile apps.
ApacheDoris A new-generation open-source real-time data warehouse.
StarRocks An Open-Source, High-Performance Analytical Database.
DB2 Scalable, secure relational database system developed by IBM.
HBase Distributed, scalable NoSQL database for large structured/semi-structured data.
Redis Fast, versatile in-memory data structure store as cache, DB, or broker.
Cassandra Scalable, fault-tolerant distributed NoSQL database for large data.
Couchbase High-performance NoSQL document database with distributed architecture.
PostgreSQL Powerful open-source relational database with extensibility and SQL standards.
Vertica Vertica is a strongly consistent, ACID-compliant, SQL data warehouse, built for the scale and complexity of today’s data-driven world.
Spark Unified engine for large-scale data analytics.
Hive A distributed fault-tolerant data warehouse system.
Space knowledge analytics.
TuGraph TuGraph is a high-performance graph database jointly developed by Ant Group and Tsinghua University.
\ No newline at end of file
+
MySQL Fast, reliable, scalable open-source relational database management system.
MSSQL Powerful, scalable, secure relational database system by Microsoft.
DuckDB In-memory analytical database with efficient query processing.
Sqlite Lightweight embedded relational database with simplicity and portability.
ClickHouse Columnar database for high-performance analytics and real-time queries.
Oracle Robust, scalable, secure relational database widely used in enterprises.
Access Easy-to-use relational database for small-scale applications by Microsoft.
MongoDB Flexible, scalable NoSQL document database for web and mobile apps.
ApacheDoris A new-generation open-source real-time data warehouse.
StarRocks An Open-Source, High-Performance Analytical Database.
DB2 Scalable, secure relational database system developed by IBM.
HBase Distributed, scalable NoSQL database for large structured/semi-structured data.
Redis Fast, versatile in-memory data structure store as cache, DB, or broker.
Cassandra Scalable, fault-tolerant distributed NoSQL database for large data.
Couchbase High-performance NoSQL document database with distributed architecture.
PostgreSQL Powerful open-source relational database with extensibility and SQL standards.
Vertica Vertica is a strongly consistent, ACID-compliant, SQL data warehouse, built for the scale and complexity of today’s data-driven world.
Spark Unified engine for large-scale data analytics.
Hive A distributed fault-tolerant data warehouse system.
Space knowledge analytics.
TuGraph TuGraph is a high-performance graph database jointly developed by Ant Group and Tsinghua University.
\ No newline at end of file
diff --git a/dbgpt/app/static/flow/canvas/index.html b/dbgpt/app/static/flow/canvas/index.html
index 6e1aa438c..11cb608be 100644
--- a/dbgpt/app/static/flow/canvas/index.html
+++ b/dbgpt/app/static/flow/canvas/index.html
@@ -1 +1 @@
-
Press enter or space to select a node.You can then use the arrow keys to move the node around. Press delete to remove it and escape to cancel.
Press enter or space to select an edge. You can then press delete to remove it or escape to cancel.
\ No newline at end of file
+
Press enter or space to select a node.You can then use the arrow keys to move the node around. Press delete to remove it and escape to cancel.
Press enter or space to select an edge. You can then press delete to remove it or escape to cancel.
\ No newline at end of file
diff --git a/dbgpt/app/static/flow/index.html b/dbgpt/app/static/flow/index.html
index b4757c8c3..0e57bcd3c 100644
--- a/dbgpt/app/static/flow/index.html
+++ b/dbgpt/app/static/flow/index.html
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/index.html b/dbgpt/app/static/index.html
index 41b6dbeb5..8d4dd1924 100644
--- a/dbgpt/app/static/index.html
+++ b/dbgpt/app/static/index.html
@@ -1 +1 @@
-Quick Start
\ No newline at end of file
+Quick Start
\ No newline at end of file
diff --git a/dbgpt/app/static/knowledge/chunk/index.html b/dbgpt/app/static/knowledge/chunk/index.html
index d13d0b241..21631b8f3 100644
--- a/dbgpt/app/static/knowledge/chunk/index.html
+++ b/dbgpt/app/static/knowledge/chunk/index.html
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/knowledge/graph/index.html b/dbgpt/app/static/knowledge/graph/index.html
index 51bc38f3c..f2a630bcb 100644
--- a/dbgpt/app/static/knowledge/graph/index.html
+++ b/dbgpt/app/static/knowledge/graph/index.html
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/knowledge/index.html b/dbgpt/app/static/knowledge/index.html
index a755ebe8c..5c5dad6b9 100644
--- a/dbgpt/app/static/knowledge/index.html
+++ b/dbgpt/app/static/knowledge/index.html
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/models/index.html b/dbgpt/app/static/models/index.html
index 6bf15b681..ed536cd3c 100644
--- a/dbgpt/app/static/models/index.html
+++ b/dbgpt/app/static/models/index.html
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/dbgpt/app/static/models/knowledge-default.jpg b/dbgpt/app/static/models/knowledge-default.jpg
index 312b49a11..226bd405b 100644
Binary files a/dbgpt/app/static/models/knowledge-default.jpg and b/dbgpt/app/static/models/knowledge-default.jpg differ
diff --git a/dbgpt/app/static/models/knowledge-full-text.jpg b/dbgpt/app/static/models/knowledge-full-text.jpg
new file mode 100644
index 000000000..89e09d54a
Binary files /dev/null and b/dbgpt/app/static/models/knowledge-full-text.jpg differ
diff --git a/dbgpt/app/static/prompt/index.html b/dbgpt/app/static/prompt/index.html
index 20a14bc91..ef46b3cbe 100644
--- a/dbgpt/app/static/prompt/index.html
+++ b/dbgpt/app/static/prompt/index.html
@@ -1 +1 @@
-
Add Prompts Add Prompts template
\ No newline at end of file
+
Add Prompts Add Prompts template
\ No newline at end of file
diff --git a/dbgpt/rag/assembler/db_schema.py b/dbgpt/rag/assembler/db_schema.py
index 18c78c333..209b8d9a3 100644
--- a/dbgpt/rag/assembler/db_schema.py
+++ b/dbgpt/rag/assembler/db_schema.py
@@ -1,13 +1,12 @@
"""DBSchemaAssembler."""
from typing import Any, List, Optional
-from dbgpt.core import Chunk, Embeddings
+from dbgpt.core import Chunk
from dbgpt.datasource.base import BaseConnector
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from ..assembler.base import BaseAssembler
from ..chunk_manager import ChunkParameters
-from ..embedding.embedding_factory import DefaultEmbeddingFactory
+from ..index.base import IndexStoreBase
from ..knowledge.datasource import DatasourceKnowledge
from ..retriever.db_schema import DBSchemaRetriever
@@ -36,36 +35,22 @@ class DBSchemaAssembler(BaseAssembler):
def __init__(
self,
connector: BaseConnector,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
chunk_parameters: Optional[ChunkParameters] = None,
- embedding_model: Optional[str] = None,
- embeddings: Optional[Embeddings] = None,
**kwargs: Any,
) -> None:
"""Initialize with Embedding Assembler arguments.
Args:
connector: (BaseConnector) BaseConnector connection.
- vector_store_connector: (VectorStoreConnector) VectorStoreConnector to use.
+ index_store: (IndexStoreBase) IndexStoreBase to use.
chunk_manager: (Optional[ChunkManager]) ChunkManager to use for chunking.
embedding_model: (Optional[str]) Embedding model to use.
embeddings: (Optional[Embeddings]) Embeddings to use.
"""
knowledge = DatasourceKnowledge(connector)
self._connector = connector
- self._vector_store_connector = vector_store_connector
-
- self._embedding_model = embedding_model
- if self._embedding_model and not embeddings:
- embeddings = DefaultEmbeddingFactory(
- default_model_name=self._embedding_model
- ).create(self._embedding_model)
-
- if (
- embeddings
- and self._vector_store_connector.vector_store_config.embedding_fn is None
- ):
- self._vector_store_connector.vector_store_config.embedding_fn = embeddings
+ self._index_store = index_store
super().__init__(
knowledge=knowledge,
@@ -77,29 +62,23 @@ class DBSchemaAssembler(BaseAssembler):
def load_from_connection(
cls,
connector: BaseConnector,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
chunk_parameters: Optional[ChunkParameters] = None,
- embedding_model: Optional[str] = None,
- embeddings: Optional[Embeddings] = None,
) -> "DBSchemaAssembler":
"""Load document embedding into vector store from path.
Args:
connector: (BaseConnector) BaseConnector connection.
- vector_store_connector: (VectorStoreConnector) VectorStoreConnector to use.
+ index_store: (IndexStoreBase) IndexStoreBase to use.
chunk_parameters: (Optional[ChunkParameters]) ChunkManager to use for
chunking.
- embedding_model: (Optional[str]) Embedding model to use.
- embeddings: (Optional[Embeddings]) Embeddings to use.
Returns:
DBSchemaAssembler
"""
return cls(
connector=connector,
- vector_store_connector=vector_store_connector,
- embedding_model=embedding_model,
+ index_store=index_store,
chunk_parameters=chunk_parameters,
- embeddings=embeddings,
)
def get_chunks(self) -> List[Chunk]:
@@ -112,7 +91,7 @@ class DBSchemaAssembler(BaseAssembler):
Returns:
List[str]: List of chunk ids.
"""
- return self._vector_store_connector.load_document(self._chunks)
+ return self._index_store.load_document(self._chunks)
def _extract_info(self, chunks) -> List[Chunk]:
"""Extract info from chunks."""
@@ -131,5 +110,5 @@ class DBSchemaAssembler(BaseAssembler):
top_k=top_k,
connector=self._connector,
is_embeddings=True,
- vector_store_connector=self._vector_store_connector,
+ index_store=self._index_store,
)
diff --git a/dbgpt/rag/assembler/embedding.py b/dbgpt/rag/assembler/embedding.py
index 095408c3e..57162e469 100644
--- a/dbgpt/rag/assembler/embedding.py
+++ b/dbgpt/rag/assembler/embedding.py
@@ -3,13 +3,13 @@ from concurrent.futures import ThreadPoolExecutor
from typing import Any, List, Optional
from dbgpt.core import Chunk, Embeddings
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from ...util.executor_utils import blocking_func_to_async
from ..assembler.base import BaseAssembler
from ..chunk_manager import ChunkParameters
-from ..embedding.embedding_factory import DefaultEmbeddingFactory
+from ..index.base import IndexStoreBase
from ..knowledge.base import Knowledge
+from ..retriever import BaseRetriever, RetrieverStrategy
from ..retriever.embedding import EmbeddingRetriever
@@ -32,37 +32,26 @@ class EmbeddingAssembler(BaseAssembler):
def __init__(
self,
knowledge: Knowledge,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
chunk_parameters: Optional[ChunkParameters] = None,
- embedding_model: Optional[str] = None,
- embeddings: Optional[Embeddings] = None,
+ retrieve_strategy: Optional[RetrieverStrategy] = RetrieverStrategy.EMBEDDING,
**kwargs: Any,
) -> None:
"""Initialize with Embedding Assembler arguments.
Args:
knowledge: (Knowledge) Knowledge datasource.
- vector_store_connector: (VectorStoreConnector) VectorStoreConnector to use.
+ index_store: (IndexStoreBase) IndexStoreBase to use.
chunk_parameters: (Optional[ChunkParameters]) ChunkManager to use for
chunking.
+            retrieve_strategy: (Optional[RetrieverStrategy]) Retriever strategy to use.
embedding_model: (Optional[str]) Embedding model to use.
embeddings: (Optional[Embeddings]) Embeddings to use.
"""
if knowledge is None:
raise ValueError("knowledge datasource must be provided.")
- self._vector_store_connector = vector_store_connector
-
- self._embedding_model = embedding_model
- if self._embedding_model and not embeddings:
- embeddings = DefaultEmbeddingFactory(
- default_model_name=self._embedding_model
- ).create(self._embedding_model)
-
- if (
- embeddings
- and self._vector_store_connector.vector_store_config.embedding_fn is None
- ):
- self._vector_store_connector.vector_store_config.embedding_fn = embeddings
+ self._index_store = index_store
+ self._retrieve_strategy = retrieve_strategy
super().__init__(
knowledge=knowledge,
@@ -74,52 +63,53 @@ class EmbeddingAssembler(BaseAssembler):
def load_from_knowledge(
cls,
knowledge: Knowledge,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
chunk_parameters: Optional[ChunkParameters] = None,
embedding_model: Optional[str] = None,
embeddings: Optional[Embeddings] = None,
+ retrieve_strategy: Optional[RetrieverStrategy] = RetrieverStrategy.EMBEDDING,
) -> "EmbeddingAssembler":
"""Load document embedding into vector store from path.
Args:
knowledge: (Knowledge) Knowledge datasource.
- vector_store_connector: (VectorStoreConnector) VectorStoreConnector to use.
+ index_store: (IndexStoreBase) IndexStoreBase to use.
chunk_parameters: (Optional[ChunkParameters]) ChunkManager to use for
chunking.
embedding_model: (Optional[str]) Embedding model to use.
embeddings: (Optional[Embeddings]) Embeddings to use.
+ retrieve_strategy: (Optional[RetrieverStrategy]) Retriever strategy.
Returns:
EmbeddingAssembler
"""
return cls(
knowledge=knowledge,
- vector_store_connector=vector_store_connector,
+ index_store=index_store,
chunk_parameters=chunk_parameters,
embedding_model=embedding_model,
embeddings=embeddings,
+ retrieve_strategy=retrieve_strategy,
)
@classmethod
async def aload_from_knowledge(
cls,
knowledge: Knowledge,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
chunk_parameters: Optional[ChunkParameters] = None,
- embedding_model: Optional[str] = None,
- embeddings: Optional[Embeddings] = None,
executor: Optional[ThreadPoolExecutor] = None,
+ retrieve_strategy: Optional[RetrieverStrategy] = RetrieverStrategy.EMBEDDING,
) -> "EmbeddingAssembler":
"""Load document embedding into vector store from path.
Args:
knowledge: (Knowledge) Knowledge datasource.
- vector_store_connector: (VectorStoreConnector) VectorStoreConnector to use.
chunk_parameters: (Optional[ChunkParameters]) ChunkManager to use for
chunking.
- embedding_model: (Optional[str]) Embedding model to use.
- embeddings: (Optional[Embeddings]) Embeddings to use.
+ index_store: (IndexStoreBase) Index store to use.
executor: (Optional[ThreadPoolExecutor) ThreadPoolExecutor to use.
+ retrieve_strategy: (Optional[RetrieverStrategy]) Retriever strategy.
Returns:
EmbeddingAssembler
@@ -129,19 +119,18 @@ class EmbeddingAssembler(BaseAssembler):
executor,
cls,
knowledge,
- vector_store_connector,
+ index_store,
chunk_parameters,
- embedding_model,
- embeddings,
+ retrieve_strategy,
)
def persist(self) -> List[str]:
- """Persist chunks into vector store.
+ """Persist chunks into store.
Returns:
List[str]: List of chunk ids.
"""
- return self._vector_store_connector.load_document(self._chunks)
+ return self._index_store.load_document(self._chunks)
async def apersist(self) -> List[str]:
"""Persist chunks into store.
@@ -149,13 +138,14 @@ class EmbeddingAssembler(BaseAssembler):
Returns:
List[str]: List of chunk ids.
"""
- return await self._vector_store_connector.aload_document(self._chunks)
+ # persist chunks into vector store
+ return await self._index_store.aload_document(self._chunks)
def _extract_info(self, chunks) -> List[Chunk]:
"""Extract info from chunks."""
return []
- def as_retriever(self, top_k: int = 4, **kwargs) -> EmbeddingRetriever:
+ def as_retriever(self, top_k: int = 4, **kwargs) -> BaseRetriever:
"""Create a retriever.
Args:
@@ -165,5 +155,7 @@ class EmbeddingAssembler(BaseAssembler):
EmbeddingRetriever
"""
return EmbeddingRetriever(
- top_k=top_k, vector_store_connector=self._vector_store_connector
+ top_k=top_k,
+ index_store=self._index_store,
+ retrieve_strategy=self._retrieve_strategy,
)
diff --git a/dbgpt/rag/assembler/tests/test_db_struct_assembler.py b/dbgpt/rag/assembler/tests/test_db_struct_assembler.py
index 6bb2d4986..84638b692 100644
--- a/dbgpt/rag/assembler/tests/test_db_struct_assembler.py
+++ b/dbgpt/rag/assembler/tests/test_db_struct_assembler.py
@@ -8,7 +8,7 @@ from dbgpt.rag.chunk_manager import ChunkParameters, SplitterType
from dbgpt.rag.embedding.embedding_factory import EmbeddingFactory
from dbgpt.rag.knowledge.base import Knowledge
from dbgpt.rag.text_splitter.text_splitter import CharacterTextSplitter
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore
@pytest.fixture
@@ -48,7 +48,7 @@ def mock_embedding_factory():
@pytest.fixture
def mock_vector_store_connector():
- return MagicMock(spec=VectorStoreConnector)
+ return MagicMock(spec=ChromaStore)
@pytest.fixture
@@ -70,7 +70,7 @@ def test_load_knowledge(
knowledge=mock_knowledge,
chunk_parameters=mock_chunk_parameters,
embeddings=mock_embedding_factory.create(),
- vector_store_connector=mock_vector_store_connector,
+ index_store=mock_vector_store_connector,
)
assembler.load_knowledge(knowledge=mock_knowledge)
assert len(assembler._chunks) == 0
diff --git a/dbgpt/rag/assembler/tests/test_embedding_assembler.py b/dbgpt/rag/assembler/tests/test_embedding_assembler.py
index 4921b8359..350ccad39 100644
--- a/dbgpt/rag/assembler/tests/test_embedding_assembler.py
+++ b/dbgpt/rag/assembler/tests/test_embedding_assembler.py
@@ -7,7 +7,7 @@ from dbgpt.rag.assembler.db_schema import DBSchemaAssembler
from dbgpt.rag.chunk_manager import ChunkParameters, SplitterType
from dbgpt.rag.embedding.embedding_factory import EmbeddingFactory
from dbgpt.rag.text_splitter.text_splitter import CharacterTextSplitter
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore
@pytest.fixture
@@ -47,7 +47,7 @@ def mock_embedding_factory():
@pytest.fixture
def mock_vector_store_connector():
- return MagicMock(spec=VectorStoreConnector)
+ return MagicMock(spec=ChromaStore)
def test_load_knowledge(
@@ -63,6 +63,6 @@ def test_load_knowledge(
connector=mock_db_connection,
chunk_parameters=mock_chunk_parameters,
embeddings=mock_embedding_factory.create(),
- vector_store_connector=mock_vector_store_connector,
+ index_store=mock_vector_store_connector,
)
assert len(assembler._chunks) == 1
diff --git a/dbgpt/rag/index/base.py b/dbgpt/rag/index/base.py
index e33509c82..a48d6dad4 100644
--- a/dbgpt/rag/index/base.py
+++ b/dbgpt/rag/index/base.py
@@ -2,7 +2,7 @@
import logging
import time
from abc import ABC, abstractmethod
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import Executor, ThreadPoolExecutor
from typing import Any, Dict, List, Optional
from dbgpt._private.pydantic import BaseModel, ConfigDict, Field, model_to_dict
@@ -47,7 +47,7 @@ class IndexStoreConfig(BaseModel):
class IndexStoreBase(ABC):
"""Index store base class."""
- def __init__(self, executor: Optional[ThreadPoolExecutor] = None):
+ def __init__(self, executor: Optional[Executor] = None):
"""Init index store."""
self._executor = executor or ThreadPoolExecutor()
@@ -63,7 +63,7 @@ class IndexStoreBase(ABC):
"""
@abstractmethod
- def aload_document(self, chunks: List[Chunk]) -> List[str]:
+ async def aload_document(self, chunks: List[Chunk]) -> List[str]:
"""Load document in index database.
Args:
@@ -94,7 +94,7 @@ class IndexStoreBase(ABC):
"""
@abstractmethod
- def delete_by_ids(self, ids: str):
+ def delete_by_ids(self, ids: str) -> List[str]:
"""Delete docs.
Args:
diff --git a/dbgpt/rag/operators/db_schema.py b/dbgpt/rag/operators/db_schema.py
index 59d592d12..d0a7c0d9f 100644
--- a/dbgpt/rag/operators/db_schema.py
+++ b/dbgpt/rag/operators/db_schema.py
@@ -5,10 +5,10 @@ from typing import List, Optional
from dbgpt.core import Chunk
from dbgpt.core.interface.operators.retriever import RetrieverOperator
from dbgpt.datasource.base import BaseConnector
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from ..assembler.db_schema import DBSchemaAssembler
from ..chunk_manager import ChunkParameters
+from ..index.base import IndexStoreBase
from ..retriever.db_schema import DBSchemaRetriever
from .assembler import AssemblerOperator
@@ -19,13 +19,13 @@ class DBSchemaRetrieverOperator(RetrieverOperator[str, List[Chunk]]):
Args:
connector (BaseConnector): The connection.
top_k (int, optional): The top k. Defaults to 4.
- vector_store_connector (VectorStoreConnector, optional): The vector store
+ index_store (IndexStoreBase, optional): The vector store
connector. Defaults to None.
"""
def __init__(
self,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
top_k: int = 4,
connector: Optional[BaseConnector] = None,
**kwargs
@@ -35,7 +35,7 @@ class DBSchemaRetrieverOperator(RetrieverOperator[str, List[Chunk]]):
self._retriever = DBSchemaRetriever(
top_k=top_k,
connector=connector,
- vector_store_connector=vector_store_connector,
+ index_store=index_store,
)
def retrieve(self, query: str) -> List[Chunk]:
@@ -53,7 +53,7 @@ class DBSchemaAssemblerOperator(AssemblerOperator[BaseConnector, List[Chunk]]):
def __init__(
self,
connector: BaseConnector,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
chunk_parameters: Optional[ChunkParameters] = None,
**kwargs
):
@@ -61,14 +61,14 @@ class DBSchemaAssemblerOperator(AssemblerOperator[BaseConnector, List[Chunk]]):
Args:
connector (BaseConnector): The connection.
- vector_store_connector (VectorStoreConnector): The vector store connector.
+ index_store (IndexStoreBase): The Storage IndexStoreBase.
chunk_parameters (Optional[ChunkParameters], optional): The chunk
parameters.
"""
if not chunk_parameters:
chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
self._chunk_parameters = chunk_parameters
- self._vector_store_connector = vector_store_connector
+ self._index_store = index_store
self._connector = connector
super().__init__(**kwargs)
@@ -84,7 +84,7 @@ class DBSchemaAssemblerOperator(AssemblerOperator[BaseConnector, List[Chunk]]):
assembler = DBSchemaAssembler.load_from_connection(
connector=self._connector,
chunk_parameters=self._chunk_parameters,
- vector_store_connector=self._vector_store_connector,
+ index_store=self._index_store,
)
assembler.persist()
return assembler.get_chunks()
diff --git a/dbgpt/rag/operators/embedding.py b/dbgpt/rag/operators/embedding.py
index c8fab8107..7054f4a04 100644
--- a/dbgpt/rag/operators/embedding.py
+++ b/dbgpt/rag/operators/embedding.py
@@ -6,11 +6,11 @@ from typing import List, Optional, Union
from dbgpt.core import Chunk
from dbgpt.core.awel.flow import IOField, OperatorCategory, Parameter, ViewMetadata
from dbgpt.core.interface.operators.retriever import RetrieverOperator
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.util.i18n_utils import _
from ..assembler.embedding import EmbeddingAssembler
from ..chunk_manager import ChunkParameters
+from ..index.base import IndexStoreBase
from ..knowledge import Knowledge
from ..retriever.embedding import EmbeddingRetriever
from ..retriever.rerank import Ranker
@@ -28,9 +28,9 @@ class EmbeddingRetrieverOperator(RetrieverOperator[Union[str, List[str]], List[C
category=OperatorCategory.RAG,
parameters=[
Parameter.build_from(
- _("Vector Store Connector"),
+ _("Storage Index Store"),
"vector_store_connector",
- VectorStoreConnector,
+ IndexStoreBase,
description=_("The vector store connector."),
),
Parameter.build_from(
@@ -88,7 +88,7 @@ class EmbeddingRetrieverOperator(RetrieverOperator[Union[str, List[str]], List[C
def __init__(
self,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
top_k: int,
score_threshold: float = 0.3,
query_rewrite: Optional[QueryRewrite] = None,
@@ -99,7 +99,7 @@ class EmbeddingRetrieverOperator(RetrieverOperator[Union[str, List[str]], List[C
super().__init__(**kwargs)
self._score_threshold = score_threshold
self._retriever = EmbeddingRetriever(
- vector_store_connector=vector_store_connector,
+ index_store=index_store,
top_k=top_k,
query_rewrite=query_rewrite,
rerank=rerank,
@@ -129,7 +129,7 @@ class EmbeddingAssemblerOperator(AssemblerOperator[Knowledge, List[Chunk]]):
Parameter.build_from(
_("Vector Store Connector"),
"vector_store_connector",
- VectorStoreConnector,
+ IndexStoreBase,
description=_("The vector store connector."),
),
Parameter.build_from(
@@ -164,21 +164,21 @@ class EmbeddingAssemblerOperator(AssemblerOperator[Knowledge, List[Chunk]]):
def __init__(
self,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
chunk_parameters: Optional[ChunkParameters] = None,
**kwargs
):
"""Create a new EmbeddingAssemblerOperator.
Args:
- vector_store_connector (VectorStoreConnector): The vector store connector.
+ index_store (IndexStoreBase): The index storage.
chunk_parameters (Optional[ChunkParameters], optional): The chunk
parameters. Defaults to ChunkParameters(chunk_strategy="CHUNK_BY_SIZE").
"""
if not chunk_parameters:
chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
self._chunk_parameters = chunk_parameters
- self._vector_store_connector = vector_store_connector
+ self._index_store = index_store
super().__init__(**kwargs)
def assemble(self, knowledge: Knowledge) -> List[Chunk]:
@@ -186,7 +186,7 @@ class EmbeddingAssemblerOperator(AssemblerOperator[Knowledge, List[Chunk]]):
assembler = EmbeddingAssembler.load_from_knowledge(
knowledge=knowledge,
chunk_parameters=self._chunk_parameters,
- vector_store_connector=self._vector_store_connector,
+ index_store=self._index_store,
)
assembler.persist()
return assembler.get_chunks()
diff --git a/dbgpt/rag/operators/schema_linking.py b/dbgpt/rag/operators/schema_linking.py
index 0bf432edb..d1d14ed6a 100644
--- a/dbgpt/rag/operators/schema_linking.py
+++ b/dbgpt/rag/operators/schema_linking.py
@@ -8,8 +8,8 @@ from typing import Any, Optional
from dbgpt.core import LLMClient
from dbgpt.core.awel import MapOperator
from dbgpt.datasource.base import BaseConnector
+from dbgpt.rag.index.base import IndexStoreBase
from dbgpt.rag.schemalinker.schema_linking import SchemaLinking
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
class SchemaLinkingOperator(MapOperator[Any, Any]):
@@ -21,7 +21,7 @@ class SchemaLinkingOperator(MapOperator[Any, Any]):
model_name: str,
llm: LLMClient,
top_k: int = 5,
- vector_store_connector: Optional[VectorStoreConnector] = None,
+ index_store: Optional[IndexStoreBase] = None,
**kwargs
):
"""Create the schema linking operator.
@@ -37,7 +37,7 @@ class SchemaLinkingOperator(MapOperator[Any, Any]):
connector=connector,
llm=llm,
model_name=model_name,
- vector_store_connector=vector_store_connector,
+ index_store=index_store,
)
async def map(self, query: str) -> str:
diff --git a/dbgpt/rag/retriever/base.py b/dbgpt/rag/retriever/base.py
index 14647c089..759b6e242 100644
--- a/dbgpt/rag/retriever/base.py
+++ b/dbgpt/rag/retriever/base.py
@@ -17,6 +17,7 @@ class RetrieverStrategy(str, Enum):
"""
EMBEDDING = "embedding"
+ GRAPH = "graph"
KEYWORD = "keyword"
HYBRID = "hybrid"
diff --git a/dbgpt/rag/retriever/db_schema.py b/dbgpt/rag/retriever/db_schema.py
index eccf6d733..3c2b1d079 100644
--- a/dbgpt/rag/retriever/db_schema.py
+++ b/dbgpt/rag/retriever/db_schema.py
@@ -4,10 +4,10 @@ from typing import List, Optional, cast
from dbgpt.core import Chunk
from dbgpt.datasource.base import BaseConnector
+from dbgpt.rag.index.base import IndexStoreBase
from dbgpt.rag.retriever.base import BaseRetriever
from dbgpt.rag.retriever.rerank import DefaultRanker, Ranker
from dbgpt.rag.summary.rdbms_db_summary import _parse_db_summary
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.storage.vector_store.filters import MetadataFilters
from dbgpt.util.chat_util import run_async_tasks
@@ -17,7 +17,7 @@ class DBSchemaRetriever(BaseRetriever):
def __init__(
self,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
top_k: int = 4,
connector: Optional[BaseConnector] = None,
query_rewrite: bool = False,
@@ -27,7 +27,7 @@ class DBSchemaRetriever(BaseRetriever):
"""Create DBSchemaRetriever.
Args:
- vector_store_connector (VectorStoreConnector): vector store connector
+            index_store (IndexStoreBase): index store to use
top_k (int): top k
connector (Optional[BaseConnector]): RDBMSConnector.
query_rewrite (bool): query rewrite
@@ -67,18 +67,22 @@ class DBSchemaRetriever(BaseRetriever):
connector = _create_temporary_connection()
- vector_store_config = ChromaVectorConfig(name="vector_store_name")
- embedding_model_path = "{your_embedding_model_path}"
embedding_fn = embedding_factory.create(model_name=embedding_model_path)
- vector_connector = VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=vector_store_config,
- embedding_fn=embedding_fn,
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="dbschema_rag_test",
+ embedding_fn=DefaultEmbeddingFactory(
+ default_model_name=os.path.join(
+ MODEL_PATH, "text2vec-large-chinese"
+ ),
+ ).create(),
)
+
+ vector_store = ChromaStore(config)
# get db struct retriever
retriever = DBSchemaRetriever(
top_k=3,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
connector=connector,
)
chunks = retriever.retrieve("show columns from table")
@@ -88,9 +92,9 @@ class DBSchemaRetriever(BaseRetriever):
self._top_k = top_k
self._connector = connector
self._query_rewrite = query_rewrite
- self._vector_store_connector = vector_store_connector
+ self._index_store = index_store
self._need_embeddings = False
- if self._vector_store_connector:
+ if self._index_store:
self._need_embeddings = True
self._rerank = rerank or DefaultRanker(self._top_k)
@@ -109,7 +113,7 @@ class DBSchemaRetriever(BaseRetriever):
if self._need_embeddings:
queries = [query]
candidates = [
- self._vector_store_connector.similar_search(query, self._top_k, filters)
+ self._index_store.similar_search(query, self._top_k, filters)
for query in queries
]
return cast(List[Chunk], reduce(lambda x, y: x + y, candidates))
@@ -185,7 +189,7 @@ class DBSchemaRetriever(BaseRetriever):
self, query, filters: Optional[MetadataFilters] = None
) -> List[Chunk]:
"""Similar search."""
- return self._vector_store_connector.similar_search(query, self._top_k, filters)
+ return self._index_store.similar_search(query, self._top_k, filters)
async def _aparse_db_summary(self) -> List[str]:
"""Similar search."""
diff --git a/dbgpt/rag/retriever/embedding.py b/dbgpt/rag/retriever/embedding.py
index c54eb0d09..ddd161e17 100644
--- a/dbgpt/rag/retriever/embedding.py
+++ b/dbgpt/rag/retriever/embedding.py
@@ -4,10 +4,10 @@ from functools import reduce
from typing import Any, Dict, List, Optional, cast
from dbgpt.core import Chunk
-from dbgpt.rag.retriever.base import BaseRetriever
+from dbgpt.rag.index.base import IndexStoreBase
+from dbgpt.rag.retriever.base import BaseRetriever, RetrieverStrategy
from dbgpt.rag.retriever.rerank import DefaultRanker, Ranker
from dbgpt.rag.retriever.rewrite import QueryRewrite
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.storage.vector_store.filters import MetadataFilters
from dbgpt.util.chat_util import run_async_tasks
from dbgpt.util.tracer import root_tracer
@@ -18,18 +18,19 @@ class EmbeddingRetriever(BaseRetriever):
def __init__(
self,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
top_k: int = 4,
query_rewrite: Optional[QueryRewrite] = None,
rerank: Optional[Ranker] = None,
+ retrieve_strategy: Optional[RetrieverStrategy] = RetrieverStrategy.EMBEDDING,
):
"""Create EmbeddingRetriever.
Args:
+            index_store (IndexStoreBase): index store used for retrieval
top_k (int): top k
query_rewrite (Optional[QueryRewrite]): query rewrite
rerank (Ranker): rerank
- vector_store_connector (VectorStoreConnector): vector store connector
Examples:
.. code-block:: python
@@ -64,8 +65,9 @@ class EmbeddingRetriever(BaseRetriever):
"""
self._top_k = top_k
self._query_rewrite = query_rewrite
- self._vector_store_connector = vector_store_connector
+ self._index_store = index_store
self._rerank = rerank or DefaultRanker(self._top_k)
+ self._retrieve_strategy = retrieve_strategy
def load_document(self, chunks: List[Chunk], **kwargs: Dict[str, Any]) -> List[str]:
"""Load document in vector database.
@@ -75,7 +77,7 @@ class EmbeddingRetriever(BaseRetriever):
Return:
List[str]: chunk ids.
"""
- return self._vector_store_connector.load_document(chunks)
+ return self._index_store.load_document(chunks)
def _retrieve(
self, query: str, filters: Optional[MetadataFilters] = None
@@ -90,7 +92,7 @@ class EmbeddingRetriever(BaseRetriever):
"""
queries = [query]
candidates = [
- self._vector_store_connector.similar_search(query, self._top_k, filters)
+ self._index_store.similar_search(query, self._top_k, filters)
for query in queries
]
res_candidates = cast(List[Chunk], reduce(lambda x, y: x + y, candidates))
@@ -113,7 +115,7 @@ class EmbeddingRetriever(BaseRetriever):
"""
queries = [query]
candidates_with_score = [
- self._vector_store_connector.similar_search_with_scores(
+ self._index_store.similar_search_with_scores(
query, self._top_k, score_threshold, filters
)
for query in queries
@@ -217,7 +219,7 @@ class EmbeddingRetriever(BaseRetriever):
self, query, filters: Optional[MetadataFilters] = None
) -> List[Chunk]:
"""Similar search."""
- return self._vector_store_connector.similar_search(query, self._top_k, filters)
+ return self._index_store.similar_search(query, self._top_k, filters)
async def _run_async_tasks(self, tasks) -> List[Chunk]:
"""Run async tasks."""
@@ -229,6 +231,6 @@ class EmbeddingRetriever(BaseRetriever):
self, query, score_threshold, filters: Optional[MetadataFilters] = None
) -> List[Chunk]:
"""Similar search with score."""
- return await self._vector_store_connector.asimilar_search_with_scores(
+ return await self._index_store.asimilar_search_with_scores(
query, self._top_k, score_threshold, filters
)
diff --git a/dbgpt/rag/retriever/tests/test_db_struct.py b/dbgpt/rag/retriever/tests/test_db_struct.py
index 5138d8510..4cda20365 100644
--- a/dbgpt/rag/retriever/tests/test_db_struct.py
+++ b/dbgpt/rag/retriever/tests/test_db_struct.py
@@ -25,7 +25,7 @@ def mock_vector_store_connector():
def dbstruct_retriever(mock_db_connection, mock_vector_store_connector):
return DBSchemaRetriever(
connector=mock_db_connection,
- vector_store_connector=mock_vector_store_connector,
+ index_store=mock_vector_store_connector,
)
diff --git a/dbgpt/rag/retriever/tests/test_embedding.py b/dbgpt/rag/retriever/tests/test_embedding.py
index 14e278285..9a95a6f3d 100644
--- a/dbgpt/rag/retriever/tests/test_embedding.py
+++ b/dbgpt/rag/retriever/tests/test_embedding.py
@@ -25,8 +25,8 @@ def mock_vector_store_connector():
def embedding_retriever(top_k, mock_vector_store_connector):
return EmbeddingRetriever(
top_k=top_k,
- query_rewrite=False,
- vector_store_connector=mock_vector_store_connector,
+ query_rewrite=None,
+ index_store=mock_vector_store_connector,
)
diff --git a/dbgpt/rag/retriever/time_weighted.py b/dbgpt/rag/retriever/time_weighted.py
index 722db6c69..a544de396 100644
--- a/dbgpt/rag/retriever/time_weighted.py
+++ b/dbgpt/rag/retriever/time_weighted.py
@@ -7,9 +7,9 @@ from typing import Any, Dict, List, Optional, Tuple
from dbgpt.core import Chunk
from dbgpt.rag.retriever.rerank import Ranker
from dbgpt.rag.retriever.rewrite import QueryRewrite
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.storage.vector_store.filters import MetadataFilters
+from ..index.base import IndexStoreBase
from .embedding import EmbeddingRetriever
@@ -23,7 +23,7 @@ class TimeWeightedEmbeddingRetriever(EmbeddingRetriever):
def __init__(
self,
- vector_store_connector: VectorStoreConnector,
+ index_store: IndexStoreBase,
top_k: int = 100,
query_rewrite: Optional[QueryRewrite] = None,
rerank: Optional[Ranker] = None,
@@ -32,13 +32,13 @@ class TimeWeightedEmbeddingRetriever(EmbeddingRetriever):
"""Initialize TimeWeightedEmbeddingRetriever.
Args:
- vector_store_connector (VectorStoreConnector): vector store connector
+            index_store (IndexStoreBase): index store to use
top_k (int): top k
query_rewrite (Optional[QueryRewrite]): query rewrite
rerank (Ranker): rerank
"""
super().__init__(
- vector_store_connector=vector_store_connector,
+ index_store=index_store,
top_k=top_k,
query_rewrite=query_rewrite,
rerank=rerank,
@@ -69,7 +69,7 @@ class TimeWeightedEmbeddingRetriever(EmbeddingRetriever):
doc.metadata["created_at"] = current_time
doc.metadata["buffer_idx"] = len(self.memory_stream) + i
self.memory_stream.extend(dup_docs)
- return self._vector_store_connector.load_document(dup_docs)
+ return self._index_store.load_document(dup_docs)
def _retrieve(
self, query: str, filters: Optional[MetadataFilters] = None
@@ -125,7 +125,7 @@ class TimeWeightedEmbeddingRetriever(EmbeddingRetriever):
def get_salient_docs(self, query: str) -> Dict[int, Tuple[Chunk, float]]:
"""Return documents that are salient to the query."""
docs_and_scores: List[Chunk]
- docs_and_scores = self._vector_store_connector.similar_search_with_scores(
+ docs_and_scores = self._index_store.similar_search_with_scores(
query, topk=self._top_k, score_threshold=0
)
results = {}
diff --git a/dbgpt/rag/schemalinker/schema_linking.py b/dbgpt/rag/schemalinker/schema_linking.py
index a83c5ca17..4bfd3f6ed 100644
--- a/dbgpt/rag/schemalinker/schema_linking.py
+++ b/dbgpt/rag/schemalinker/schema_linking.py
@@ -13,7 +13,7 @@ from dbgpt.core import (
from dbgpt.datasource.base import BaseConnector
from dbgpt.rag.schemalinker.base_linker import BaseSchemaLinker
from dbgpt.rag.summary.rdbms_db_summary import _parse_db_summary
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.util.chat_util import run_async_tasks
INSTRUCTION = """
diff --git a/dbgpt/rag/summary/db_summary_client.py b/dbgpt/rag/summary/db_summary_client.py
index e77560477..de5ee83ff 100644
--- a/dbgpt/rag/summary/db_summary_client.py
+++ b/dbgpt/rag/summary/db_summary_client.py
@@ -48,8 +48,8 @@ class DBSummaryClient:
def get_db_summary(self, dbname, query, topk):
"""Get user query related tables info."""
+ from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.storage.vector_store.base import VectorStoreConfig
- from dbgpt.storage.vector_store.connector import VectorStoreConnector
vector_store_config = VectorStoreConfig(name=dbname + "_profile")
vector_connector = VectorStoreConnector.from_default(
@@ -60,7 +60,7 @@ class DBSummaryClient:
from dbgpt.rag.retriever.db_schema import DBSchemaRetriever
retriever = DBSchemaRetriever(
- top_k=topk, vector_store_connector=vector_connector
+ top_k=topk, index_store=vector_connector.index_client
)
table_docs = retriever.retrieve(query)
ans = [d.content for d in table_docs]
@@ -88,8 +88,8 @@ class DBSummaryClient:
dbname(str): dbname
"""
vector_store_name = dbname + "_profile"
+ from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.storage.vector_store.base import VectorStoreConfig
- from dbgpt.storage.vector_store.connector import VectorStoreConnector
vector_store_config = VectorStoreConfig(name=vector_store_name)
vector_connector = VectorStoreConnector.from_default(
@@ -102,7 +102,7 @@ class DBSummaryClient:
db_assembler = DBSchemaAssembler.load_from_connection(
connector=db_summary_client.db,
- vector_store_connector=vector_connector,
+ index_store=vector_connector.index_client,
)
if len(db_assembler.get_chunks()) > 0:
@@ -114,8 +114,8 @@ class DBSummaryClient:
def delete_db_profile(self, dbname):
"""Delete db profile."""
vector_store_name = dbname + "_profile"
+ from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.storage.vector_store.base import VectorStoreConfig
- from dbgpt.storage.vector_store.connector import VectorStoreConnector
vector_store_config = VectorStoreConfig(name=vector_store_name)
vector_connector = VectorStoreConnector.from_default(
diff --git a/dbgpt/serve/datasource/service/service.py b/dbgpt/serve/datasource/service/service.py
index 44c9c8274..fa3778326 100644
--- a/dbgpt/serve/datasource/service/service.py
+++ b/dbgpt/serve/datasource/service/service.py
@@ -13,10 +13,10 @@ from dbgpt.datasource.manages.connect_config_db import (
ConnectConfigEntity,
)
from dbgpt.serve.core import BaseService
+from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.schema import DBType
from dbgpt.storage.vector_store.base import VectorStoreConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.util.executor_utils import ExecutorFactory
from ..api.schemas import DatasourceServeRequest, DatasourceServeResponse
diff --git a/dbgpt/serve/prompt/api/endpoints.py b/dbgpt/serve/prompt/api/endpoints.py
index 2da890eb8..87e1ac3a9 100644
--- a/dbgpt/serve/prompt/api/endpoints.py
+++ b/dbgpt/serve/prompt/api/endpoints.py
@@ -66,24 +66,24 @@ async def check_api_key(
if request.url.path.startswith(f"/api/v1"):
return None
- if service.config.api_keys:
- api_keys = _parse_api_keys(service.config.api_keys)
- if auth is None or (token := auth.credentials) not in api_keys:
- raise HTTPException(
- status_code=401,
- detail={
- "error": {
- "message": "",
- "type": "invalid_request_error",
- "param": None,
- "code": "invalid_api_key",
- }
- },
- )
- return token
- else:
- # api_keys not set; allow all
- return None
+ # if service.config.api_keys:
+ # api_keys = _parse_api_keys(service.config.api_keys)
+ # if auth is None or (token := auth.credentials) not in api_keys:
+ # raise HTTPException(
+ # status_code=401,
+ # detail={
+ # "error": {
+ # "message": "",
+ # "type": "invalid_request_error",
+ # "param": None,
+ # "code": "invalid_api_key",
+ # }
+ # },
+ # )
+ # return token
+ # else:
+ # # api_keys not set; allow all
+ # return None
@router.get("/health")
diff --git a/dbgpt/serve/prompt/tests/test_endpoints.py b/dbgpt/serve/prompt/tests/test_endpoints.py
index 9bde556b8..f9fd1db74 100644
--- a/dbgpt/serve/prompt/tests/test_endpoints.py
+++ b/dbgpt/serve/prompt/tests/test_endpoints.py
@@ -78,18 +78,6 @@ async def test_api_health(client: AsyncClient, asystem_app, has_auth: bool):
if has_auth:
assert response.status_code == 200
assert response.json() == {"status": "ok"}
- else:
- assert response.status_code == 401
- assert response.json() == {
- "detail": {
- "error": {
- "message": "",
- "type": "invalid_request_error",
- "param": None,
- "code": "invalid_api_key",
- }
- }
- }
@pytest.mark.asyncio
diff --git a/dbgpt/storage/vector_store/connector.py b/dbgpt/serve/rag/connector.py
similarity index 92%
rename from dbgpt/storage/vector_store/connector.py
rename to dbgpt/serve/rag/connector.py
index 4cd5bddd3..cb621b56c 100644
--- a/dbgpt/storage/vector_store/connector.py
+++ b/dbgpt/serve/rag/connector.py
@@ -60,20 +60,20 @@ def _load_vector_options() -> List[OptionValue]:
class VectorStoreConnector:
"""The connector for vector store.
- VectorStoreConnector, can connect different vector db provided load document api_v1
- and similar search api_v1.
+ VectorStoreConnector, can connect different vector db provided load document api_v1
+ and similar search api_v1.
- 1.load_document:knowledge document source into vector store.(Chroma, Milvus,
- Weaviate).
- 2.similar_search: similarity search from vector_store.
- 3.similar_search_with_scores: similarity search with similarity score from
- vector_store
+ 1.load_document:knowledge document source into vector store.(Chroma, Milvus,
+ Weaviate).
+ 2.similar_search: similarity search from vector_store.
+ 3.similar_search_with_scores: similarity search with similarity score from
+ vector_store
- code example:
- >>> from dbgpt.storage.vector_store.connector import VectorStoreConnector
-
- >>> vector_store_config = VectorStoreConfig
- >>> vector_store_connector = VectorStoreConnector(vector_store_type="Chroma")
+ code example:
+ >>> from dbgpt.serve.rag.connector import VectorStoreConnector
+
+ >>> vector_store_config = VectorStoreConfig
+ >>> vector_store_connector = VectorStoreConnector(vector_store_type="Chroma")
"""
def __init__(
@@ -140,6 +140,10 @@ class VectorStoreConnector:
real_vector_store_type = cast(str, vector_store_type)
return cls(real_vector_store_type, vector_store_config)
+ @property
+ def index_client(self):
+ return self.client
+
def load_document(self, chunks: List[Chunk]) -> List[str]:
"""Load document in vector database.
diff --git a/dbgpt/serve/rag/operators/knowledge_space.py b/dbgpt/serve/rag/operators/knowledge_space.py
index 52cb2eeb4..c37495ed5 100644
--- a/dbgpt/serve/rag/operators/knowledge_space.py
+++ b/dbgpt/serve/rag/operators/knowledge_space.py
@@ -25,8 +25,8 @@ from dbgpt.core.awel.task.base import IN, OUT
from dbgpt.core.interface.operators.prompt_operator import BasePromptBuilderOperator
from dbgpt.rag.embedding.embedding_factory import EmbeddingFactory
from dbgpt.rag.retriever.embedding import EmbeddingRetriever
+from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.storage.vector_store.base import VectorStoreConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.util.function_utils import rearrange_args_by_type
from dbgpt.util.i18n_utils import _
diff --git a/dbgpt/serve/rag/retriever/knowledge_space.py b/dbgpt/serve/rag/retriever/knowledge_space.py
index 6f2e3899e..6711c36db 100644
--- a/dbgpt/serve/rag/retriever/knowledge_space.py
+++ b/dbgpt/serve/rag/retriever/knowledge_space.py
@@ -6,7 +6,7 @@ from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG
from dbgpt.core import Chunk
from dbgpt.rag.embedding.embedding_factory import EmbeddingFactory
from dbgpt.rag.retriever.base import BaseRetriever
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.storage.vector_store.filters import MetadataFilters
from dbgpt.util.executor_utils import ExecutorFactory, blocking_func_to_async
diff --git a/dbgpt/serve/rag/service/service.py b/dbgpt/serve/rag/service/service.py
index 8d7f55fc5..f18f4feaa 100644
--- a/dbgpt/serve/rag/service/service.py
+++ b/dbgpt/serve/rag/service/service.py
@@ -22,7 +22,7 @@ from dbgpt.configs.model_config import (
EMBEDDING_MODEL_CONFIG,
KNOWLEDGE_UPLOAD_ROOT_PATH,
)
-from dbgpt.core import Chunk, LLMClient
+from dbgpt.core import LLMClient
from dbgpt.core.awel.dag.dag_manager import DAGManager
from dbgpt.model import DefaultLLMClient
from dbgpt.model.cluster import WorkerManagerFactory
@@ -31,12 +31,11 @@ from dbgpt.rag.chunk_manager import ChunkParameters
from dbgpt.rag.embedding import EmbeddingFactory
from dbgpt.rag.knowledge import ChunkStrategy, KnowledgeFactory, KnowledgeType
from dbgpt.serve.core import BaseService
+from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata._base_dao import QUERY_SPEC
from dbgpt.storage.vector_store.base import VectorStoreConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
from dbgpt.util.dbgpts.loader import DBGPTsLoader
-from dbgpt.util.executor_utils import ExecutorFactory
from dbgpt.util.pagination_utils import PaginationResult
from dbgpt.util.tracer import root_tracer, trace
@@ -481,7 +480,6 @@ class Service(BaseService[KnowledgeSpaceEntity, SpaceServeRequest, SpaceServeRes
)
)
logger.info(f"begin save document chunks, doc:{doc.doc_name}")
- # return chunk_docs
@trace("async_doc_embedding")
async def async_doc_embedding(
@@ -495,7 +493,7 @@ class Service(BaseService[KnowledgeSpaceEntity, SpaceServeRequest, SpaceServeRes
- doc: doc
"""
- logger.info(f"async doc embedding sync, doc:{doc.doc_name}")
+ logger.info(f"async doc persist sync, doc:{doc.doc_name}")
try:
with root_tracer.start_span(
"app.knowledge.assembler.persist",
@@ -503,17 +501,17 @@ class Service(BaseService[KnowledgeSpaceEntity, SpaceServeRequest, SpaceServeRes
):
assembler = await EmbeddingAssembler.aload_from_knowledge(
knowledge=knowledge,
+ index_store=vector_store_connector.index_client,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_store_connector,
)
chunk_docs = assembler.get_chunks()
doc.chunk_size = len(chunk_docs)
vector_ids = await assembler.apersist()
doc.status = SyncStatus.FINISHED.name
- doc.result = "document embedding success"
+ doc.result = "document persist into index store success"
if vector_ids is not None:
doc.vector_ids = ",".join(vector_ids)
- logger.info(f"async document embedding, success:{doc.doc_name}")
+ logger.info(f"async document persist index store success:{doc.doc_name}")
# save chunk details
chunk_entities = [
DocumentChunkEntity(
diff --git a/dbgpt/storage/full_text/__init__.py b/dbgpt/storage/full_text/__init__.py
new file mode 100644
index 000000000..bb4066441
--- /dev/null
+++ b/dbgpt/storage/full_text/__init__.py
@@ -0,0 +1 @@
+"""Full Text Store Module."""
diff --git a/dbgpt/storage/full_text/base.py b/dbgpt/storage/full_text/base.py
new file mode 100644
index 000000000..6643f2373
--- /dev/null
+++ b/dbgpt/storage/full_text/base.py
@@ -0,0 +1,69 @@
+"""Full text store base class."""
+import logging
+from abc import abstractmethod
+from concurrent.futures import Executor
+from typing import List, Optional
+
+from dbgpt.core import Chunk
+from dbgpt.rag.index.base import IndexStoreBase
+from dbgpt.storage.vector_store.filters import MetadataFilters
+from dbgpt.util.executor_utils import blocking_func_to_async
+
+logger = logging.getLogger(__name__)
+
+
+class FullTextStoreBase(IndexStoreBase):
+ """Graph store base class."""
+
+ def __init__(self, executor: Optional[Executor] = None):
+ """Initialize vector store."""
+ super().__init__(executor)
+
+ @abstractmethod
+ def load_document(self, chunks: List[Chunk]) -> List[str]:
+ """Load document in index database.
+
+ Args:
+ chunks(List[Chunk]): document chunks.
+ Return:
+ List[str]: chunk ids.
+ """
+
+ async def aload_document(self, chunks: List[Chunk]) -> List[str]:
+ """Async load document in index database.
+
+ Args:
+ chunks(List[Chunk]): document chunks.
+ Return:
+ List[str]: chunk ids.
+ """
+ return await blocking_func_to_async(self._executor, self.load_document, chunks)
+
+ @abstractmethod
+ def similar_search_with_scores(
+ self,
+ text,
+ topk,
+ score_threshold: float,
+ filters: Optional[MetadataFilters] = None,
+ ) -> List[Chunk]:
+ """Similar search with scores in index database.
+
+ Args:
+ text(str): The query text.
+ topk(int): The number of similar documents to return.
+ score_threshold(int): score_threshold: Optional, a floating point value
+ between 0 to 1
+ filters(Optional[MetadataFilters]): metadata filters.
+ """
+
+ @abstractmethod
+ def delete_by_ids(self, ids: str) -> List[str]:
+ """Delete docs.
+
+ Args:
+ ids(str): The vector ids to delete, separated by comma.
+ """
+
+ def delete_vector_name(self, index_name: str):
+ """Delete name."""
diff --git a/dbgpt/storage/full_text/elasticsearch.py b/dbgpt/storage/full_text/elasticsearch.py
new file mode 100644
index 000000000..bfa8dd7a5
--- /dev/null
+++ b/dbgpt/storage/full_text/elasticsearch.py
@@ -0,0 +1,227 @@
+"""Elasticsearch document store."""
+import json
+import os
+from concurrent.futures import Executor, ThreadPoolExecutor
+from typing import List, Optional
+
+from dbgpt.core import Chunk
+from dbgpt.rag.index.base import logger
+from dbgpt.storage.full_text.base import FullTextStoreBase
+from dbgpt.storage.vector_store.elastic_store import ElasticsearchVectorConfig
+from dbgpt.storage.vector_store.filters import MetadataFilters
+from dbgpt.util import string_utils
+from dbgpt.util.executor_utils import blocking_func_to_async
+
+
+class ElasticDocumentConfig(ElasticsearchVectorConfig):
+ """Elasticsearch document store config."""
+
+ k1: Optional[float] = 2.0
+ b: Optional[float] = 0.75
+
+
+class ElasticDocumentStore(FullTextStoreBase):
+ """Elasticsearch index store."""
+
+ def __init__(
+ self, es_config: ElasticDocumentConfig, executor: Optional[Executor] = None
+ ):
+ """Init elasticsearch index store.
+
+ refer https://www.elastic.co/guide/en/elasticsearch/reference/8.9/index-
+ modules-similarity.html
+ TF/IDF based similarity that has built-in tf normalization and is supposed to
+ work better for short fields (like names). See Okapi_BM25 for more details.
+ This similarity has the following options:
+ """
+ super().__init__()
+ from elasticsearch import Elasticsearch
+
+ self._es_config = es_config
+ self._es_url = es_config.uri or os.getenv("ELASTICSEARCH_URL", "localhost")
+ self._es_port = es_config.port or os.getenv("ELASTICSEARCH_PORT", "9200")
+ self._es_username = es_config.user or os.getenv("ELASTICSEARCH_USER", "elastic")
+ self._es_password = es_config.password or os.getenv(
+ "ELASTICSEARCH_PASSWORD", "dbgpt"
+ )
+ self._index_name = es_config.name.lower()
+ if string_utils.contains_chinese(es_config.name):
+ bytes_str = es_config.name.encode("utf-8")
+ hex_str = bytes_str.hex()
+ self._index_name = "dbgpt_" + hex_str
+ # k1 (Optional[float]): Controls non-linear term frequency normalization
+ # (saturation). The default value is 2.0.
+ self._k1 = es_config.k1 or 2.0
+ # b (Optional[float]): Controls to what degree document length normalizes
+ # tf values. The default value is 0.75.
+ self._b = es_config.b or 0.75
+ if self._es_username and self._es_password:
+ self._es_client = Elasticsearch(
+ hosts=[f"http://{self._es_url}:{self._es_port}"],
+ basic_auth=(self._es_username, self._es_password),
+ )
+ else:
+ self._es_client = Elasticsearch(
+ hosts=[f"http://{self._es_url}:{self._es_port}"],
+ )
+ self._es_index_settings = {
+ "analysis": {"analyzer": {"default": {"type": "standard"}}},
+ "similarity": {
+ "custom_bm25": {
+ "type": "BM25",
+ "k1": self._k1,
+ "b": self._b,
+ }
+ },
+ }
+ self._es_mappings = {
+ "properties": {
+ "content": {
+ "type": "text",
+ "similarity": "custom_bm25",
+ },
+ "metadata": {
+ "type": "keyword",
+ },
+ }
+ }
+
+ if not self._es_client.indices.exists(index=self._index_name):
+ self._es_client.indices.create(
+ index=self._index_name,
+ mappings=self._es_mappings,
+ settings=self._es_index_settings,
+ )
+ self._executor = executor or ThreadPoolExecutor()
+
+ def load_document(self, chunks: List[Chunk]) -> List[str]:
+ """Load document in elasticsearch.
+
+ Args:
+ chunks(List[Chunk]): document chunks.
+
+ Return:
+ List[str]: chunk ids.
+ """
+ try:
+ from elasticsearch.helpers import bulk
+ except ImportError:
+ raise ValueError("Please install package `pip install elasticsearch`.")
+ es_requests = []
+ ids = []
+ contents = [chunk.content for chunk in chunks]
+ metadatas = [json.dumps(chunk.metadata) for chunk in chunks]
+ chunk_ids = [chunk.chunk_id for chunk in chunks]
+ for i, content in enumerate(contents):
+ es_request = {
+ "_op_type": "index",
+ "_index": self._index_name,
+ "content": content,
+ "metadata": metadatas[i],
+ "_id": chunk_ids[i],
+ }
+ ids.append(chunk_ids[i])
+ es_requests.append(es_request)
+ bulk(self._es_client, es_requests)
+ self._es_client.indices.refresh(index=self._index_name)
+ return ids
+
+ def similar_search(
+ self, text: str, topk: int, filters: Optional[MetadataFilters] = None
+ ) -> List[Chunk]:
+ """Search similar text.
+
+ Args:
+ text(str): text.
+ topk(int): topk.
+ filters(MetadataFilters): filters.
+
+ Return:
+ List[Chunk]: similar text.
+ """
+ es_query = {"query": {"match": {"content": text}}}
+ res = self._es_client.search(index=self._index_name, body=es_query)
+
+ chunks = []
+ for r in res["hits"]["hits"]:
+ chunks.append(
+ Chunk(
+ chunk_id=r["_id"],
+ content=r["_source"]["content"],
+ metadata=json.loads(r["_source"]["metadata"]),
+ )
+ )
+ return chunks[:topk]
+
+    def similar_search_with_scores(
+        self,
+        text,
+        topk: int = 10,
+        score_threshold: float = 0.3,
+        filters: Optional[MetadataFilters] = None,
+    ) -> List[Chunk]:
+        """Search similar text with scores.
+
+        Args:
+            text(str): text.
+            topk(int): top k.
+            score_threshold(float): minimum score for a hit to be included.
+            filters(MetadataFilters): filters.
+
+        Return:
+            List[Chunk]: similar chunks with scores.
+        """
+        es_query = {"query": {"match": {"content": text}}}
+        res = self._es_client.search(index=self._index_name, body=es_query)
+
+        chunks_with_scores = []
+        for r in res["hits"]["hits"]:
+            if r["_score"] >= score_threshold:
+                chunks_with_scores.append(
+                    Chunk(
+                        chunk_id=r["_id"],
+                        content=r["_source"]["content"],
+                        metadata=json.loads(r["_source"]["metadata"]),
+                        score=r["_score"],
+                    )
+                )
+        if score_threshold is not None and len(chunks_with_scores) == 0:
+            logger.warning(
+                "No relevant docs were retrieved using the relevance score"
+                f" threshold {score_threshold}"
+            )
+        return chunks_with_scores[:topk]
+
+ async def aload_document(self, chunks: List[Chunk]) -> List[str]:
+ """Async load document in elasticsearch.
+
+ Args:
+ chunks(List[Chunk]): document chunks.
+ Return:
+ List[str]: chunk ids.
+ """
+ return await blocking_func_to_async(self._executor, self.load_document, chunks)
+
+ def delete_by_ids(self, ids: str) -> List[str]:
+ """Delete document by ids.
+
+ Args:
+ ids(List[str]): document ids.
+ Return:
+ return ids.
+ """
+ id_list = ids.split(",")
+ bulk_body = [
+ {"delete": {"_index": self._index_name, "_id": doc_id}}
+ for doc_id in id_list
+ ]
+ self._es_client.bulk(body=bulk_body)
+ return id_list
+
+ def delete_vector_name(self, index_name: str):
+ """Delete index by name.
+
+ Args:
+            index_name(str): ignored; the store deletes its own index.
+ """
+ self._es_client.indices.delete(index=self._index_name)
diff --git a/dbgpt/storage/full_text/opensearch.py b/dbgpt/storage/full_text/opensearch.py
new file mode 100644
index 000000000..d9a6ec378
--- /dev/null
+++ b/dbgpt/storage/full_text/opensearch.py
@@ -0,0 +1,63 @@
+"""OpenSearch index store."""
+from typing import List, Optional
+
+from dbgpt.core import Chunk
+from dbgpt.rag.index.base import IndexStoreBase
+from dbgpt.storage.vector_store.filters import MetadataFilters
+
+
+class OpenSearch(IndexStoreBase):
+ """OpenSearch index store."""
+
+ def load_document(self, chunks: List[Chunk]) -> List[str]:
+ """Load document in index database.
+
+ Args:
+ chunks(List[Chunk]): document chunks.
+ Return:
+ List[str]: chunk ids.
+ """
+ pass
+
+    async def aload_document(self, chunks: List[Chunk]) -> List[str]:
+ """Async load document in index database.
+
+ Args:
+ chunks(List[Chunk]): document chunks.
+ Return:
+ List[str]: chunk ids.
+ """
+ pass
+
+ def similar_search_with_scores(
+ self,
+ text,
+ topk,
+ score_threshold: float,
+ filters: Optional[MetadataFilters] = None,
+ ) -> List[Chunk]:
+ """Similar search with scores in index database.
+
+ Args:
+ text(str): The query text.
+ topk(int): The number of similar documents to return.
+ score_threshold(int): score_threshold: Optional, a floating point value
+ between 0 to 1
+ filters(Optional[MetadataFilters]): metadata filters.
+ Return:
+ List[Chunk]: The similar documents.
+ """
+ pass
+
+ def delete_by_ids(self, ids: str):
+ """Delete docs.
+
+ Args:
+ ids(str): The vector ids to delete, separated by comma.
+
+ """
+ pass
+
+ def delete_vector_name(self, index_name: str):
+ """Delete name."""
+ pass
diff --git a/dbgpt/storage/knowledge_graph/base.py b/dbgpt/storage/knowledge_graph/base.py
index 55dda8a35..c10cb63de 100644
--- a/dbgpt/storage/knowledge_graph/base.py
+++ b/dbgpt/storage/knowledge_graph/base.py
@@ -1,7 +1,7 @@
"""Knowledge graph base class."""
import logging
from abc import ABC, abstractmethod
-from typing import Optional
+from typing import List, Optional
from dbgpt._private.pydantic import ConfigDict
from dbgpt.rag.index.base import IndexStoreBase, IndexStoreConfig
@@ -23,6 +23,6 @@ class KnowledgeGraphBase(IndexStoreBase, ABC):
def query_graph(self, limit: Optional[int] = None) -> Graph:
"""Get graph data."""
- def delete_by_ids(self, ids: str):
+ def delete_by_ids(self, ids: str) -> List[str]:
"""Delete document by ids."""
raise Exception("Delete document not supported by knowledge graph")
diff --git a/dbgpt/storage/vector_store/__init__.py b/dbgpt/storage/vector_store/__init__.py
index 8314e4a4f..cce559267 100644
--- a/dbgpt/storage/vector_store/__init__.py
+++ b/dbgpt/storage/vector_store/__init__.py
@@ -62,6 +62,15 @@ def _import_openspg() -> Tuple[Type, Type]:
return OpenSPG, OpenSPGConfig
+def _import_full_text() -> Tuple[Type, Type]:
+ from dbgpt.storage.full_text.elasticsearch import (
+ ElasticDocumentConfig,
+ ElasticDocumentStore,
+ )
+
+ return ElasticDocumentStore, ElasticDocumentConfig
+
+
def __getattr__(name: str) -> Tuple[Type, Type]:
if name == "Chroma":
return _import_chroma()
@@ -79,6 +88,8 @@ def __getattr__(name: str) -> Tuple[Type, Type]:
return _import_builtin_knowledge_graph()
elif name == "OpenSPG":
return _import_openspg()
+ elif name == "FullText":
+ return _import_full_text()
else:
raise AttributeError(f"Could not find: {name}")
@@ -94,4 +105,6 @@ __vector_store__ = [
__knowledge_graph__ = ["KnowledgeGraph", "OpenSPG"]
-__all__ = __vector_store__ + __knowledge_graph__
+__document_store__ = ["FullText"]
+
+__all__ = __vector_store__ + __knowledge_graph__ + __document_store__
diff --git a/dbgpt/storage/vector_store/elastic_store.py b/dbgpt/storage/vector_store/elastic_store.py
index 328af163b..dac79d9e8 100644
--- a/dbgpt/storage/vector_store/elastic_store.py
+++ b/dbgpt/storage/vector_store/elastic_store.py
@@ -145,7 +145,7 @@ class ElasticStore(VectorStoreBase):
elasticsearch_vector_config.get("name") or vector_store_config.name
)
# name to hex
- if string_utils.is_all_chinese(self.collection_name):
+ if string_utils.contains_chinese(self.collection_name):
bytes_str = self.collection_name.encode("utf-8")
hex_str = bytes_str.hex()
self.collection_name = hex_str
diff --git a/dbgpt/storage/vector_store/milvus_store.py b/dbgpt/storage/vector_store/milvus_store.py
index 1e0a612b6..b8b036770 100644
--- a/dbgpt/storage/vector_store/milvus_store.py
+++ b/dbgpt/storage/vector_store/milvus_store.py
@@ -174,7 +174,7 @@ class MilvusStore(VectorStoreBase):
self.collection_name = (
milvus_vector_config.get("name") or vector_store_config.name
)
- if string_utils.is_all_chinese(self.collection_name):
+ if string_utils.contains_chinese(self.collection_name):
bytes_str = self.collection_name.encode("utf-8")
hex_str = bytes_str.hex()
self.collection_name = hex_str
diff --git a/dbgpt/util/string_utils.py b/dbgpt/util/string_utils.py
index e32eb1288..b7534bb50 100644
--- a/dbgpt/util/string_utils.py
+++ b/dbgpt/util/string_utils.py
@@ -9,6 +9,11 @@ def is_all_chinese(text):
return match is not None
+def contains_chinese(text):
+ """Check if the text contains Chinese characters."""
+ return re.search(r"[\u4e00-\u9fa5]", text) is not None
+
+
def is_number_chinese(text):
### Determine whether the string is numbers and Chinese
pattern = re.compile(r"^[\d一-龥]+$")
diff --git a/docs/docs/awel/cookbook/first_rag_with_awel.md b/docs/docs/awel/cookbook/first_rag_with_awel.md
index a2f0757ae..aa4da6dec 100644
--- a/docs/docs/awel/cookbook/first_rag_with_awel.md
+++ b/docs/docs/awel/cookbook/first_rag_with_awel.md
@@ -76,14 +76,12 @@ from dbgpt.core.awel import DAG
from dbgpt.rag import ChunkParameters
from dbgpt.rag.knowledge import KnowledgeType
from dbgpt.rag.operators import EmbeddingAssemblerOperator, KnowledgeOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
# Delete old vector store directory(/tmp/awel_rag_test_vector_store)
shutil.rmtree("/tmp/awel_rag_test_vector_store", ignore_errors=True)
-vector_connector = VectorStoreConnector.from_default(
- "Chroma",
+vector_store = ChromaStore(
vector_store_config=ChromaVectorConfig(
name="test_vstore",
persist_path="/tmp/awel_rag_test_vector_store",
@@ -95,7 +93,7 @@ with DAG("load_knowledge_dag") as knowledge_dag:
# Load knowledge from URL
knowledge_task = KnowledgeOperator(knowledge_type=KnowledgeType.URL.name)
assembler_task = EmbeddingAssemblerOperator(
- vector_store_connector=vector_connector,
+ index_store=vector_store,
chunk_parameters=ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
)
knowledge_task >> assembler_task
@@ -116,7 +114,7 @@ from dbgpt.rag.operators import EmbeddingRetrieverOperator
with DAG("retriever_dag") as retriever_dag:
retriever_task = EmbeddingRetrieverOperator(
top_k=3,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
content_task = MapOperator(lambda cks: "\n".join(c.content for c in cks))
retriever_task >> content_task
@@ -218,7 +216,7 @@ with DAG("llm_rag_dag") as rag_dag:
input_task = InputOperator(input_source=InputSource.from_callable())
retriever_task = EmbeddingRetrieverOperator(
top_k=3,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
content_task = MapOperator(lambda cks: "\n".join(c.content for c in cks))
@@ -256,10 +254,10 @@ from dbgpt.core.awel import DAG, MapOperator, InputOperator, JoinOperator, Input
from dbgpt.core.operators import PromptBuilderOperator, RequestBuilderOperator
from dbgpt.rag import ChunkParameters
from dbgpt.rag.knowledge import KnowledgeType
-from dbgpt.rag.operators import EmbeddingAssemblerOperator, KnowledgeOperator, EmbeddingRetrieverOperator
+from dbgpt.rag.operators import (
+    EmbeddingAssemblerOperator,
+    EmbeddingRetrieverOperator,
+    KnowledgeOperator,
+)
from dbgpt.rag.embedding import DefaultEmbeddingFactory
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
from dbgpt.model.operators import LLMOperator
from dbgpt.model.proxy import OpenAILLMClient
@@ -273,8 +271,7 @@ llm_client = OpenAILLMClient()
# Delete old vector store directory(/tmp/awel_rag_test_vector_store)
shutil.rmtree("/tmp/awel_rag_test_vector_store", ignore_errors=True)
-vector_connector = VectorStoreConnector.from_default(
- "Chroma",
+vector_store = ChromaStore(
vector_store_config=ChromaVectorConfig(
name="test_vstore",
persist_path="/tmp/awel_rag_test_vector_store",
@@ -286,7 +283,7 @@ with DAG("load_knowledge_dag") as knowledge_dag:
# Load knowledge from URL
knowledge_task = KnowledgeOperator(knowledge_type=KnowledgeType.URL.name)
assembler_task = EmbeddingAssemblerOperator(
- vector_store_connector=vector_connector,
+ index_store=vector_store,
chunk_parameters=ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
)
knowledge_task >> assembler_task
@@ -294,7 +291,6 @@ with DAG("load_knowledge_dag") as knowledge_dag:
chunks = asyncio.run(assembler_task.call("https://docs.dbgpt.site/docs/latest/awel/"))
print(f"Chunk length: {len(chunks)}\n")
-
prompt = """Based on the known information below, provide users with professional and concise answers to their questions.
If the answer cannot be obtained from the provided content, please say:
"The information provided in the knowledge base is not sufficient to answer this question.".
@@ -305,17 +301,17 @@ It is forbidden to make up information randomly. When answering, it is best to s
{question}
"""
-
with DAG("llm_rag_dag") as rag_dag:
input_task = InputOperator(input_source=InputSource.from_callable())
retriever_task = EmbeddingRetrieverOperator(
top_k=3,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
content_task = MapOperator(lambda cks: "\n".join(c.content for c in cks))
-
- merge_task = JoinOperator(lambda context, question: {"context": context, "question": question})
-
+
+ merge_task = JoinOperator(
+ lambda context, question: {"context": context, "question": question})
+
prompt_task = PromptBuilderOperator(prompt)
# The model is gpt-3.5-turbo, you can replace it with other models.
req_build_task = RequestBuilderOperator(model="gpt-3.5-turbo")
diff --git a/docs/docs/awel/cookbook/write_your_chat_database.md b/docs/docs/awel/cookbook/write_your_chat_database.md
index c6d9af996..1e9e9d26d 100644
--- a/docs/docs/awel/cookbook/write_your_chat_database.md
+++ b/docs/docs/awel/cookbook/write_your_chat_database.md
@@ -84,19 +84,19 @@ import shutil
from dbgpt.core.awel import DAG, InputOperator
from dbgpt.rag import ChunkParameters
from dbgpt.rag.operators import DBSchemaAssemblerOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig, ChromaStore
# Delete old vector store directory(/tmp/awel_with_data_vector_store)
shutil.rmtree("/tmp/awel_with_data_vector_store", ignore_errors=True)
-vector_connector = VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="db_schema_vector_store",
- persist_path="/tmp/awel_with_data_vector_store",
- ),
- embedding_fn=embeddings
+vector_store = ChromaStore(
+    ChromaVectorConfig(
+        name="db_schema_vector_store",
+        persist_path="/tmp/awel_with_data_vector_store",
+        embedding_fn=embeddings,
+    )
)
with DAG("load_schema_dag") as load_schema_dag:
@@ -104,7 +104,7 @@ with DAG("load_schema_dag") as load_schema_dag:
# Load database schema to vector store
assembler_task = DBSchemaAssemblerOperator(
connector=db_conn,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
chunk_parameters=ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
)
input_task >> assembler_task
@@ -124,7 +124,7 @@ with DAG("retrieve_schema_dag") as retrieve_schema_dag:
# Retrieve database schema from vector store
retriever_task = DBSchemaRetrieverOperator(
top_k=1,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
input_task >> retriever_task
@@ -244,7 +244,7 @@ with DAG("chat_data_dag") as chat_data_dag:
input_task = InputOperator(input_source=InputSource.from_callable())
retriever_task = DBSchemaRetrieverOperator(
top_k=1,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
content_task = MapOperator(lambda cks: [c.content for c in cks])
merge_task = JoinOperator(lambda table_info, ext_dict: {"table_info": table_info, **ext_dict})
@@ -456,8 +456,7 @@ from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.rag import ChunkParameters
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.operators import DBSchemaAssemblerOperator, DBSchemaRetrieverOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig, ChromaStore
# Delete old vector store directory(/tmp/awel_with_data_vector_store)
shutil.rmtree("/tmp/awel_with_data_vector_store", ignore_errors=True)
@@ -488,13 +487,14 @@ db_conn.create_temp_tables(
}
)
-vector_connector = VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="db_schema_vector_store",
- persist_path="/tmp/awel_with_data_vector_store",
- ),
- embedding_fn=embeddings,
+# Create a Chroma vector store for the DB schema embeddings.
+# It replaces the removed VectorStoreConnector wrapper.
+vector_store = ChromaStore(
+    ChromaVectorConfig(
+        name="db_schema_vector_store",
+        persist_path="/tmp/awel_with_data_vector_store",
+        embedding_fn=embeddings,
+    )
)
antv_charts = [
@@ -627,7 +627,7 @@ with DAG("load_schema_dag") as load_schema_dag:
# Load database schema to vector store
assembler_task = DBSchemaAssemblerOperator(
connector=db_conn,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
chunk_parameters=ChunkParameters(chunk_strategy="CHUNK_BY_SIZE"),
)
input_task >> assembler_task
@@ -635,12 +635,11 @@ with DAG("load_schema_dag") as load_schema_dag:
chunks = asyncio.run(assembler_task.call())
print(chunks)
-
with DAG("chat_data_dag") as chat_data_dag:
input_task = InputOperator(input_source=InputSource.from_callable())
retriever_task = DBSchemaRetrieverOperator(
top_k=1,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
content_task = MapOperator(lambda cks: [c.content for c in cks])
merge_task = JoinOperator(
@@ -653,11 +652,11 @@ with DAG("chat_data_dag") as chat_data_dag:
db_query_task = DatasourceOperator(connector=db_conn)
(
- input_task
- >> MapOperator(lambda x: x["user_input"])
- >> retriever_task
- >> content_task
- >> merge_task
+ input_task
+ >> MapOperator(lambda x: x["user_input"])
+ >> retriever_task
+ >> content_task
+ >> merge_task
)
input_task >> merge_task
merge_task >> prompt_task >> req_build_task >> llm_task >> sql_parse_task
diff --git a/docs/docs/cookbook/rag/graph_rag_app_develop.md b/docs/docs/cookbook/rag/graph_rag_app_develop.md
index 18d5682a0..d0a690beb 100644
--- a/docs/docs/cookbook/rag/graph_rag_app_develop.md
+++ b/docs/docs/cookbook/rag/graph_rag_app_develop.md
@@ -129,19 +129,20 @@ To maintain compatibility with existing conventional RAG frameworks, we continue
```python
from dbgpt.model.proxy.llms.chatgpt import OpenAILLMClient
-from dbgpt.storage.vector_store.base import VectorStoreConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.knowledge_graph.knowledge_graph import (
+ BuiltinKnowledgeGraph,
+ BuiltinKnowledgeGraphConfig,
+)
-def _create_vector_connector():
- """Create vector connector."""
- return VectorStoreConnector(
- vector_store_type="KnowledgeGraph",
- vector_store_config=VectorStoreConfig(
- name="graph_rag_test_kg",
+def _create_kg_connector():
+ """Create knowledge graph connector."""
+ return BuiltinKnowledgeGraph(
+ config=BuiltinKnowledgeGraphConfig(
+ name="graph_rag_test",
embedding_fn=None,
llm_client=OpenAILLMClient(),
- model_name="gpt-4"
- )
+ model_name="gpt-4",
+ ),
)
```
@@ -162,13 +163,13 @@ from dbgpt.rag.knowledge import KnowledgeFactory
async def main():
file_path = os.path.join(ROOT_PATH, "examples/test_files/tranformers_story.md")
knowledge = KnowledgeFactory.from_file_path(file_path)
- vector_connector = _create_kg_connector()
+ graph_store = _create_kg_connector()
chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
# get embedding assembler
assembler = EmbeddingAssembler.load_from_knowledge(
knowledge=knowledge,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_connector,
+ index_store=graph_store,
)
assembler.persist()
# get embeddings retriever
@@ -178,7 +179,7 @@ async def main():
score_threshold=0.3
)
print(f"embedding rag example results:{chunks}")
- vector_connector.delete_vector_name("graph_rag_test")
+ graph_store.delete_vector_name("graph_rag_test")
```
diff --git a/docs/docs/cookbook/rag/keyword_rag_app_develop.md b/docs/docs/cookbook/rag/keyword_rag_app_develop.md
new file mode 100644
index 000000000..e02c89466
--- /dev/null
+++ b/docs/docs/cookbook/rag/keyword_rag_app_develop.md
@@ -0,0 +1,132 @@
+# Keyword Search RAG User Manual
+
+In this example, we will show how to use the Full Text Search RAG framework in DB-GPT. Using traditional full-text search to implement RAG can, to some extent, alleviate the uncertainty and interpretability issues brought about by vector database retrieval.
+
+You can refer to the python example file `DB-GPT/examples/rag/keyword_rag_example.py` in the source code. This example demonstrates how to load knowledge from a document and persist it in a full text store. Subsequently, it recalls knowledge relevant to your question by searching for keywords in the full text store.
+
+### The Constraints of Vector Retrieve
+While vector retrieval offers clear advantages, the technology does have some constraints:
+- Computationally Intensive - Generating vectors for entire corpora of documents and querying based on vector similarity requires significantly more processing power than keyword indexing and matching. Latency can be an issue if systems are not properly optimized.
+- Requires Massive Training Data - The semantic connections made by models like BERT rely on being trained on massive, diverse datasets over long periods. This data may not be readily available for specialized corpora, limiting the quality of vectors.
+- Less Effective for Precise Keyword Queries - Vector search adds little benefit when queries contain clear, precise keywords and intent. Searching for "apple fruit" would likely return poorer results than just "apple" because the vector focuses on overall meaning more than keywords.
+
+### How to Choose Between Vector Retrieval and Keyword Retrieval?
+When is vector search preferable over keyword search, and vice versa? Here are some best practices on when to use each:
+
+When to Use Vector Search:
+
+- Early stage research when query intent is vague or broad
+- Need to grasp concepts and subject matter more than keywords
+- Exploring a topic with loose information needs
+- User search queries are more conversational
+The semantic capabilities of vector search allow it to shine for these use cases. It can point users in the right direction even with limited keywords or understanding of a topic.
+
+When to Use Keyword Search:
+
+- Looking for something ultra-specific and already understand the topic
+- Research is narrowly focused with clear objectives
+- Queries contain unique proper nouns like brand names
+- Needs require fast results more than exhaustive relevancy
+For precise or time-sensitive queries, keyword search will target the exact terms efficiently. Vector search may meander with unnecessary semantic expansion.
+
+The search method should align with the user's intent and specificity needs. Vector search for exploration, keyword search for precision. With both available, users get the best of both worlds.
+
+### Install Dependencies
+
+First, you need to install the `dbgpt` library.
+
+```bash
+pip install "dbgpt[rag]>=0.5.8"
+```
+
+### Prepare Full Text Search Engine
+
+`Elasticsearch` is the distributed search and analytics engine at the heart of the Elastic Stack. Logstash and Beats facilitate collecting, aggregating, and enriching your data and storing it in Elasticsearch. Kibana enables you to interactively explore, visualize, and share insights into your data and manage and monitor the stack. Elasticsearch is where the indexing, search, and analysis magic happens.
+Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/elasticsearch-intro.html
+
+To install Elasticsearch, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html
+
+### Keyword Search Configuration
+
+Set variables below in `.env` file, let DB-GPT know how to connect to Full Text Search Engine Storage.
+
+```
+ELASTICSEARCH_URL=localhost
+ELASTICSEARCH_PORT=9200
+ELASTICSEARCH_USERNAME=elastic
+ELASTICSEARCH_PASSWORD=dbgpt
+```
+
+
+
+### Load into Full Text Search Engine
+
+When using an `Elasticsearch` full-text engine as the underlying knowledge storage platform, it is necessary to build a document inverted index to facilitate the archiving and retrieval of documents.
+
+The following code demonstrates how to create a connection to the Elasticsearch search engine.
+```python
+from dbgpt.storage.full_text.elasticsearch import ElasticDocumentConfig, \
+ ElasticDocumentStore
+def _create_es_connector():
+ """Create es connector."""
+ config = ElasticDocumentConfig(
+ name="keyword_rag_test",
+ uri="localhost",
+ port="9200",
+ user="elastic",
+ password="dbgpt",
+ )
+
+ return ElasticDocumentStore(config)
+```
+
+
+
+### Keyword Retrieve from Full Text Search Engine
+
+Keyword Retrieve is a simple and efficient way to retrieve relevant information from a large number of documents. It is based on the full-text search engine Elasticsearch. The user can input a query and retrieve the most relevant documents based on the query.
+```python
+import os
+
+from dbgpt.configs.model_config import ROOT_PATH
+from dbgpt.rag import ChunkParameters
+from dbgpt.rag.assembler import EmbeddingAssembler
+from dbgpt.rag.knowledge import KnowledgeFactory
+
+async def main():
+ file_path = os.path.join(ROOT_PATH, "docs/docs/awel/awel.md")
+ knowledge = KnowledgeFactory.from_file_path(file_path)
+ keyword_store = _create_es_connector()
+ chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
+ # get embedding assembler
+ assembler = EmbeddingAssembler.load_from_knowledge(
+ knowledge=knowledge,
+ chunk_parameters=chunk_parameters,
+ index_store=keyword_store,
+ )
+ assembler.persist()
+ # get embeddings retriever
+ retriever = assembler.as_retriever(3)
+ chunks = await retriever.aretrieve_with_scores("what is awel talk about", 0.3)
+ print(f"keyword rag example results:{chunks}")
+```
+
+
+
+
+### Chat Knowledge via Keyword RAG
+
+Here we demonstrate how to achieve chat knowledge through Keyword RAG on web page.
+
+First, create a knowledge base using the `Full Text` type. Upload the knowledge documents and wait for the slicing to complete.
+
+
+
+
+
+
+
+Start chat to knowledge based on Keyword RAG.
+
+
+
diff --git a/docs/sidebars.js b/docs/sidebars.js
index 07d7b0c93..164b662ac 100755
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -477,7 +477,10 @@ const sidebars = {
{
type: 'doc',
id: 'cookbook/rag/graph_rag_app_develop',
- }
+ },{
+ type: 'doc',
+ id: 'cookbook/rag/keyword_rag_app_develop',
+ },
],
},
{
diff --git a/docs/static/img/chat_knowledge/keyword_rag/create_keyword_rag.jpg b/docs/static/img/chat_knowledge/keyword_rag/create_keyword_rag.jpg
new file mode 100644
index 000000000..3a5132bf4
Binary files /dev/null and b/docs/static/img/chat_knowledge/keyword_rag/create_keyword_rag.jpg differ
diff --git a/docs/static/img/chat_knowledge/keyword_rag/keyword_search_chat.jpg b/docs/static/img/chat_knowledge/keyword_rag/keyword_search_chat.jpg
new file mode 100644
index 000000000..cdd0ceef7
Binary files /dev/null and b/docs/static/img/chat_knowledge/keyword_rag/keyword_search_chat.jpg differ
diff --git a/examples/awel/simple_nl_schema_sql_chart_example.py b/examples/awel/simple_nl_schema_sql_chart_example.py
index 46d05af9b..0bd451c56 100644
--- a/examples/awel/simple_nl_schema_sql_chart_example.py
+++ b/examples/awel/simple_nl_schema_sql_chart_example.py
@@ -12,8 +12,7 @@ from dbgpt.datasource.rdbms.conn_sqlite import SQLiteTempConnector
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.operators.schema_linking import SchemaLinkingOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
from dbgpt.util.chat_util import run_async_tasks
"""AWEL: Simple nl-schemalinking-sql-chart operator example
@@ -52,17 +51,16 @@ INPUT_PROMPT = "\n###Input:\n{}\n###Response:"
def _create_vector_connector():
"""Create vector connector."""
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="vector_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="embedding_rag_test",
embedding_fn=DefaultEmbeddingFactory(
default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
def _create_temporary_connection():
"""Create a temporary database connection for testing."""
diff --git a/examples/rag/cross_encoder_rerank_example.py b/examples/rag/cross_encoder_rerank_example.py
index c7e3dbaf7..faee8cb05 100644
--- a/examples/rag/cross_encoder_rerank_example.py
+++ b/examples/rag/cross_encoder_rerank_example.py
@@ -17,24 +17,21 @@ from dbgpt.rag.assembler import EmbeddingAssembler
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.knowledge import KnowledgeFactory
from dbgpt.rag.retriever.rerank import CrossEncoderRanker
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
def _create_vector_connector():
"""Create vector connector."""
- print(f"persist_path:{os.path.join(PILOT_PATH, 'data')}")
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="example_cross_encoder_rerank",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="embedding_rag_test",
embedding_fn=DefaultEmbeddingFactory(
default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
async def main():
file_path = os.path.join(ROOT_PATH, "docs/docs/awel/awel.md")
@@ -45,7 +42,7 @@ async def main():
assembler = EmbeddingAssembler.load_from_knowledge(
knowledge=knowledge,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_connector,
+ index_store=vector_connector,
)
assembler.persist()
# get embeddings retriever
@@ -57,7 +54,7 @@ async def main():
print("before rerank results:\n")
for i, chunk in enumerate(chunks):
print(f"----{i+1}.chunk content:{chunk.content}\n score:{chunk.score}")
- # cross-encoder rerank
+    # cross-encoder rerank
cross_encoder_model = os.path.join(MODEL_PATH, "bge-reranker-base")
rerank = CrossEncoderRanker(topk=3, model=cross_encoder_model)
new_chunks = rerank.rank(chunks, query=query)
diff --git a/examples/rag/db_schema_rag_example.py b/examples/rag/db_schema_rag_example.py
index 0ce8dc060..1524634fa 100644
--- a/examples/rag/db_schema_rag_example.py
+++ b/examples/rag/db_schema_rag_example.py
@@ -4,8 +4,7 @@ from dbgpt.configs.model_config import MODEL_PATH, PILOT_PATH
from dbgpt.datasource.rdbms.conn_sqlite import SQLiteTempConnector
from dbgpt.rag.assembler import DBSchemaAssembler
from dbgpt.rag.embedding import DefaultEmbeddingFactory
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
"""DB struct rag example.
pre-requirements:
@@ -46,27 +45,27 @@ def _create_temporary_connection():
def _create_vector_connector():
"""Create vector connector."""
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="db_schema_vector_store_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="dbschema_rag_test",
embedding_fn=DefaultEmbeddingFactory(
default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
if __name__ == "__main__":
connection = _create_temporary_connection()
- vector_connector = _create_vector_connector()
+ index_store = _create_vector_connector()
assembler = DBSchemaAssembler.load_from_connection(
connector=connection,
- vector_store_connector=vector_connector,
+ index_store=index_store,
)
assembler.persist()
# get db schema retriever
retriever = assembler.as_retriever(top_k=1)
chunks = retriever.retrieve("show columns from user")
print(f"db schema rag example results:{[chunk.content for chunk in chunks]}")
+ index_store.delete_vector_name("dbschema_rag_test")
diff --git a/examples/rag/embedding_rag_example.py b/examples/rag/embedding_rag_example.py
index 6104e7be9..925bba02a 100644
--- a/examples/rag/embedding_rag_example.py
+++ b/examples/rag/embedding_rag_example.py
@@ -6,8 +6,7 @@ from dbgpt.rag import ChunkParameters
from dbgpt.rag.assembler import EmbeddingAssembler
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.knowledge import KnowledgeFactory
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
"""Embedding rag example.
pre-requirements:
@@ -24,28 +23,27 @@ from dbgpt.storage.vector_store.connector import VectorStoreConnector
def _create_vector_connector():
"""Create vector connector."""
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="db_schema_vector_store_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="embedding_rag_test",
embedding_fn=DefaultEmbeddingFactory(
default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
async def main():
file_path = os.path.join(ROOT_PATH, "docs/docs/awel/awel.md")
knowledge = KnowledgeFactory.from_file_path(file_path)
- vector_connector = _create_vector_connector()
+ vector_store = _create_vector_connector()
chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
# get embedding assembler
assembler = EmbeddingAssembler.load_from_knowledge(
knowledge=knowledge,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
assembler.persist()
# get embeddings retriever
diff --git a/examples/rag/graph_rag_example.py b/examples/rag/graph_rag_example.py
index 069361c2d..c241bd6b9 100644
--- a/examples/rag/graph_rag_example.py
+++ b/examples/rag/graph_rag_example.py
@@ -6,9 +6,11 @@ from dbgpt.model.proxy.llms.chatgpt import OpenAILLMClient
from dbgpt.rag import ChunkParameters
from dbgpt.rag.assembler import EmbeddingAssembler
from dbgpt.rag.knowledge import KnowledgeFactory
-from dbgpt.storage.knowledge_graph.knowledge_graph import BuiltinKnowledgeGraphConfig
-from dbgpt.storage.vector_store.base import VectorStoreConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.rag.retriever import RetrieverStrategy
+from dbgpt.storage.knowledge_graph.knowledge_graph import (
+ BuiltinKnowledgeGraph,
+ BuiltinKnowledgeGraphConfig,
+)
"""GraphRAG example.
pre-requirements:
@@ -31,9 +33,8 @@ from dbgpt.storage.vector_store.connector import VectorStoreConnector
def _create_kg_connector():
"""Create knowledge graph connector."""
- return VectorStoreConnector(
- vector_store_type="KnowledgeGraph",
- vector_store_config=VectorStoreConfig(
+ return BuiltinKnowledgeGraph(
+ config=BuiltinKnowledgeGraphConfig(
name="graph_rag_test",
embedding_fn=None,
llm_client=OpenAILLMClient(),
@@ -45,22 +46,23 @@ def _create_kg_connector():
async def main():
file_path = os.path.join(ROOT_PATH, "examples/test_files/tranformers_story.md")
knowledge = KnowledgeFactory.from_file_path(file_path)
- vector_connector = _create_kg_connector()
+ graph_store = _create_kg_connector()
chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
# get embedding assembler
- assembler = EmbeddingAssembler.load_from_knowledge(
+ assembler = await EmbeddingAssembler.aload_from_knowledge(
knowledge=knowledge,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_connector,
+ index_store=graph_store,
+ retrieve_strategy=RetrieverStrategy.GRAPH,
)
- assembler.persist()
+ await assembler.apersist()
# get embeddings retriever
retriever = assembler.as_retriever(3)
chunks = await retriever.aretrieve_with_scores(
"What actions has Megatron taken ?", score_threshold=0.3
)
print(f"embedding rag example results:{chunks}")
- vector_connector.delete_vector_name("graph_rag_test")
+ graph_store.delete_vector_name("graph_rag_test")
if __name__ == "__main__":
diff --git a/examples/rag/keyword_rag_example.py b/examples/rag/keyword_rag_example.py
new file mode 100644
index 000000000..a2f33099a
--- /dev/null
+++ b/examples/rag/keyword_rag_example.py
@@ -0,0 +1,55 @@
+import asyncio
+import os
+
+from dbgpt.configs.model_config import ROOT_PATH
+from dbgpt.rag import ChunkParameters
+from dbgpt.rag.assembler import EmbeddingAssembler
+from dbgpt.rag.knowledge import KnowledgeFactory
+from dbgpt.storage.full_text.elasticsearch import (
+ ElasticDocumentConfig,
+ ElasticDocumentStore,
+)
+
+"""Keyword rag example.
+ pre-requirements:
+ set your Elasticsearch environment.
+
+ Examples:
+ ..code-block:: shell
+ python examples/rag/keyword_rag_example.py
+"""
+
+
+def _create_es_connector():
+ """Create es connector."""
+ config = ElasticDocumentConfig(
+ name="keyword_rag_test",
+ uri="localhost",
+ port="9200",
+ user="elastic",
+ password="dbgpt",
+ )
+
+ return ElasticDocumentStore(config)
+
+
+async def main():
+ file_path = os.path.join(ROOT_PATH, "docs/docs/awel/awel.md")
+ knowledge = KnowledgeFactory.from_file_path(file_path)
+ keyword_store = _create_es_connector()
+ chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
+ # get embedding assembler
+ assembler = EmbeddingAssembler.load_from_knowledge(
+ knowledge=knowledge,
+ chunk_parameters=chunk_parameters,
+ index_store=keyword_store,
+ )
+ assembler.persist()
+ # get embeddings retriever
+ retriever = assembler.as_retriever(3)
+ chunks = await retriever.aretrieve_with_scores("what is awel talk about", 0.3)
+ print(f"keyword rag example results:{chunks}")
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/examples/rag/metadata_filter_example.py b/examples/rag/metadata_filter_example.py
index b1bed93f4..f07050605 100644
--- a/examples/rag/metadata_filter_example.py
+++ b/examples/rag/metadata_filter_example.py
@@ -14,35 +14,33 @@ from dbgpt.rag import ChunkParameters
from dbgpt.rag.assembler import EmbeddingAssembler
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.knowledge import KnowledgeFactory
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
from dbgpt.storage.vector_store.filters import MetadataFilter, MetadataFilters
def _create_vector_connector():
"""Create vector connector."""
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="example_metadata_filter_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="metadata_rag_test",
embedding_fn=DefaultEmbeddingFactory(
default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
async def main():
file_path = os.path.join(ROOT_PATH, "docs/docs/awel/awel.md")
knowledge = KnowledgeFactory.from_file_path(file_path)
- vector_connector = _create_vector_connector()
+ vector_store = _create_vector_connector()
chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_MARKDOWN_HEADER")
# get embedding assembler
assembler = EmbeddingAssembler.load_from_knowledge(
knowledge=knowledge,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
assembler.persist()
# get embeddings retriever
@@ -54,6 +52,7 @@ async def main():
"what is awel talk about", 0.0, filters
)
print(f"embedding rag example results:{chunks}")
+ vector_store.delete_vector_name("metadata_rag_test")
if __name__ == "__main__":
diff --git a/examples/rag/rag_embedding_api_example.py b/examples/rag/rag_embedding_api_example.py
index 0b95549c7..03a9afd98 100644
--- a/examples/rag/rag_embedding_api_example.py
+++ b/examples/rag/rag_embedding_api_example.py
@@ -31,8 +31,7 @@ from dbgpt.rag import ChunkParameters
from dbgpt.rag.assembler import EmbeddingAssembler
from dbgpt.rag.embedding import OpenAPIEmbeddings
from dbgpt.rag.knowledge import KnowledgeFactory
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
def _create_embeddings(
@@ -54,33 +53,32 @@ def _create_embeddings(
def _create_vector_connector():
"""Create vector connector."""
-
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="example_embedding_api_vector_store_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="embedding_api_rag_test",
embedding_fn=_create_embeddings(),
)
+ return ChromaStore(config)
+
async def main():
file_path = os.path.join(ROOT_PATH, "docs/docs/awel/awel.md")
knowledge = KnowledgeFactory.from_file_path(file_path)
- vector_connector = _create_vector_connector()
+ vector_store = _create_vector_connector()
chunk_parameters = ChunkParameters(chunk_strategy="CHUNK_BY_SIZE")
# get embedding assembler
assembler = EmbeddingAssembler.load_from_knowledge(
knowledge=knowledge,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
assembler.persist()
# get embeddings retriever
retriever = assembler.as_retriever(3)
chunks = await retriever.aretrieve_with_scores("what is awel talk about", 0.3)
print(f"embedding rag example results:{chunks}")
+ vector_store.delete_vector_name("embedding_api_rag_test")
if __name__ == "__main__":
diff --git a/examples/rag/retriever_evaluation_example.py b/examples/rag/retriever_evaluation_example.py
index e0f8b4299..091215dba 100644
--- a/examples/rag/retriever_evaluation_example.py
+++ b/examples/rag/retriever_evaluation_example.py
@@ -15,8 +15,7 @@ from dbgpt.rag.evaluation.retriever import (
)
from dbgpt.rag.knowledge import KnowledgeFactory
from dbgpt.rag.operators import EmbeddingRetrieverOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
def _create_embeddings(
@@ -28,19 +27,16 @@ def _create_embeddings(
).create()
-def _create_vector_connector(
- embeddings: Embeddings, space_name: str = "retriever_evaluation_example"
-) -> VectorStoreConnector:
+def _create_vector_connector(embeddings: Embeddings):
"""Create vector connector."""
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name=space_name,
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="embedding_rag_test",
embedding_fn=embeddings,
)
+ return ChromaStore(config)
+
async def main():
file_path = os.path.join(ROOT_PATH, "docs/docs/awel/awel.md")
@@ -52,7 +48,7 @@ async def main():
assembler = EmbeddingAssembler.load_from_knowledge(
knowledge=knowledge,
chunk_parameters=chunk_parameters,
- vector_store_connector=vector_connector,
+ index_store=vector_connector,
)
assembler.persist()
diff --git a/examples/rag/simple_dbschema_retriever_example.py b/examples/rag/simple_dbschema_retriever_example.py
index 0b524cab5..c159e6b39 100644
--- a/examples/rag/simple_dbschema_retriever_example.py
+++ b/examples/rag/simple_dbschema_retriever_example.py
@@ -11,7 +11,7 @@
retriever_task = DBSchemaRetrieverOperator(
connector=_create_temporary_connection()
top_k=1,
- vector_store_connector=vector_store_connector
+ index_store=vector_store_connector
)
```
@@ -27,31 +27,29 @@ from typing import Dict, List
from dbgpt._private.config import Config
from dbgpt._private.pydantic import BaseModel, Field
-from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG, PILOT_PATH
+from dbgpt.configs.model_config import MODEL_PATH, PILOT_PATH
from dbgpt.core import Chunk
-from dbgpt.core.awel import DAG, HttpTrigger, InputOperator, JoinOperator, MapOperator
+from dbgpt.core.awel import DAG, HttpTrigger, JoinOperator, MapOperator
from dbgpt.datasource.rdbms.conn_sqlite import SQLiteTempConnector
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.operators import DBSchemaAssemblerOperator, DBSchemaRetrieverOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
CFG = Config()
def _create_vector_connector():
"""Create vector connector."""
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="vector_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=os.path.join(PILOT_PATH, "data"),
+ name="vector_name",
embedding_fn=DefaultEmbeddingFactory(
- default_model_name=EMBEDDING_MODEL_CONFIG[CFG.EMBEDDING_MODEL],
+ default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
def _create_temporary_connection():
"""Create a temporary database connection for testing."""
@@ -104,17 +102,17 @@ with DAG("simple_rag_db_schema_example") as dag:
)
request_handle_task = RequestHandleOperator()
query_operator = MapOperator(lambda request: request["query"])
- vector_store_connector = _create_vector_connector()
+ index_store = _create_vector_connector()
connector = _create_temporary_connection()
assembler_task = DBSchemaAssemblerOperator(
connector=connector,
- vector_store_connector=vector_store_connector,
+ index_store=index_store,
)
join_operator = JoinOperator(combine_function=_join_fn)
retriever_task = DBSchemaRetrieverOperator(
connector=_create_temporary_connection(),
top_k=1,
- vector_store_connector=vector_store_connector,
+ index_store=index_store,
)
result_parse_task = MapOperator(lambda chunks: [chunk.content for chunk in chunks])
trigger >> assembler_task >> join_operator
diff --git a/examples/rag/simple_rag_embedding_example.py b/examples/rag/simple_rag_embedding_example.py
index 56d9dcc7e..15b14f419 100644
--- a/examples/rag/simple_rag_embedding_example.py
+++ b/examples/rag/simple_rag_embedding_example.py
@@ -16,30 +16,28 @@ from typing import Dict, List
from dbgpt._private.config import Config
from dbgpt._private.pydantic import BaseModel, Field
-from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG, PILOT_PATH
+from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG, MODEL_PATH, PILOT_PATH
from dbgpt.core.awel import DAG, HttpTrigger, MapOperator
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.knowledge import KnowledgeType
from dbgpt.rag.operators import EmbeddingAssemblerOperator, KnowledgeOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
CFG = Config()
-def _create_vector_connector() -> VectorStoreConnector:
+def _create_vector_connector():
"""Create vector connector."""
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="vector_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="embedding_rag_test",
embedding_fn=DefaultEmbeddingFactory(
- default_model_name=EMBEDDING_MODEL_CONFIG[CFG.EMBEDDING_MODEL],
+ default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
class TriggerReqBody(BaseModel):
url: str = Field(..., description="url")
@@ -75,10 +73,10 @@ with DAG("simple_sdk_rag_embedding_example") as dag:
)
request_handle_task = RequestHandleOperator()
knowledge_operator = KnowledgeOperator(knowledge_type=KnowledgeType.URL.name)
- vector_connector = _create_vector_connector()
+ vector_store = _create_vector_connector()
url_parser_operator = MapOperator(map_function=lambda x: x["url"])
embedding_operator = EmbeddingAssemblerOperator(
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
output_task = ResultOperator()
(
diff --git a/examples/rag/simple_rag_retriever_example.py b/examples/rag/simple_rag_retriever_example.py
index 80786bf9e..308f790e4 100644
--- a/examples/rag/simple_rag_retriever_example.py
+++ b/examples/rag/simple_rag_retriever_example.py
@@ -31,7 +31,7 @@ from typing import Dict, List
from dbgpt._private.config import Config
from dbgpt._private.pydantic import BaseModel, Field
-from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG, PILOT_PATH
+from dbgpt.configs.model_config import MODEL_PATH, PILOT_PATH
from dbgpt.core import Chunk
from dbgpt.core.awel import DAG, HttpTrigger, JoinOperator, MapOperator
from dbgpt.model.proxy import OpenAILLMClient
@@ -41,8 +41,7 @@ from dbgpt.rag.operators import (
QueryRewriteOperator,
RerankOperator,
)
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
CFG = Config()
@@ -78,21 +77,19 @@ def _context_join_fn(context_dict: Dict, chunks: List[Chunk]) -> Dict:
def _create_vector_connector():
"""Create vector connector."""
- model_name = os.getenv("EMBEDDING_MODEL", "text2vec")
- return VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="vector_name",
- persist_path=os.path.join(PILOT_PATH, "data"),
- ),
+ config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="embedding_rag_test",
embedding_fn=DefaultEmbeddingFactory(
- default_model_name=EMBEDDING_MODEL_CONFIG[CFG.EMBEDDING_MODEL],
+ default_model_name=os.path.join(MODEL_PATH, "text2vec-large-chinese"),
).create(),
)
+ return ChromaStore(config)
+
with DAG("simple_sdk_rag_retriever_example") as dag:
- vector_connector = _create_vector_connector()
+ vector_store = _create_vector_connector()
trigger = HttpTrigger(
"/examples/rag/retrieve", methods="POST", request_body=TriggerReqBody
)
@@ -102,11 +99,11 @@ with DAG("simple_sdk_rag_retriever_example") as dag:
rewrite_operator = QueryRewriteOperator(llm_client=OpenAILLMClient())
retriever_context_operator = EmbeddingRetrieverOperator(
top_k=3,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
retriever_operator = EmbeddingRetrieverOperator(
top_k=3,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
rerank_operator = RerankOperator()
model_parse_task = MapOperator(lambda out: out.to_dict())
diff --git a/examples/sdk/chat_data_with_awel.py b/examples/sdk/chat_data_with_awel.py
index e4e797e40..6beeeab28 100644
--- a/examples/sdk/chat_data_with_awel.py
+++ b/examples/sdk/chat_data_with_awel.py
@@ -4,6 +4,7 @@ import shutil
import pandas as pd
+from dbgpt.configs.model_config import PILOT_PATH
from dbgpt.core import (
ChatPromptTemplate,
HumanPromptTemplate,
@@ -27,8 +28,7 @@ from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.rag import ChunkParameters
from dbgpt.rag.embedding import DefaultEmbeddingFactory
from dbgpt.rag.operators import DBSchemaAssemblerOperator, DBSchemaRetrieverOperator
-from dbgpt.storage.vector_store.chroma_store import ChromaVectorConfig
-from dbgpt.storage.vector_store.connector import VectorStoreConnector
+from dbgpt.storage.vector_store.chroma_store import ChromaStore, ChromaVectorConfig
# Delete old vector store directory(/tmp/awel_with_data_vector_store)
shutil.rmtree("/tmp/awel_with_data_vector_store", ignore_errors=True)
@@ -59,14 +59,12 @@ db_conn.create_temp_tables(
}
)
-vector_connector = VectorStoreConnector.from_default(
- "Chroma",
- vector_store_config=ChromaVectorConfig(
- name="db_schema_vector_store",
- persist_path="/tmp/awel_with_data_vector_store",
- ),
+config = ChromaVectorConfig(
+ persist_path=PILOT_PATH,
+ name="db_schema_vector_store",
embedding_fn=embeddings,
)
+vector_store = ChromaStore(config)
antv_charts = [
{"response_line_chart": "used to display comparative trend analysis data"},
@@ -198,7 +196,7 @@ with DAG("load_schema_dag") as load_schema_dag:
# Load database schema to vector store
assembler_task = DBSchemaAssemblerOperator(
connector=db_conn,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
chunk_parameters=ChunkParameters(chunk_strategy="CHUNK_BY_SIZE"),
)
input_task >> assembler_task
@@ -211,7 +209,7 @@ with DAG("chat_data_dag") as chat_data_dag:
input_task = InputOperator(input_source=InputSource.from_callable())
retriever_task = DBSchemaRetrieverOperator(
top_k=1,
- vector_store_connector=vector_connector,
+ index_store=vector_store,
)
content_task = MapOperator(lambda cks: [c.content for c in cks])
merge_task = JoinOperator(
diff --git a/web/components/knowledge/space-card.tsx b/web/components/knowledge/space-card.tsx
index 876076a25..0e5620227 100644
--- a/web/components/knowledge/space-card.tsx
+++ b/web/components/knowledge/space-card.tsx
@@ -71,7 +71,7 @@ export default function SpaceCard(props: IProps) {
Vector Store
Knowledge Graph
+ Full Text
label={t('Description')} name="description" rules={[{ required: true, message: t('Please_input_the_description') }]}>
diff --git a/web/public/models/knowledge-default.jpg b/web/public/models/knowledge-default.jpg
index 312b49a11..226bd405b 100644
Binary files a/web/public/models/knowledge-default.jpg and b/web/public/models/knowledge-default.jpg differ
diff --git a/web/public/models/knowledge-full-text.jpg b/web/public/models/knowledge-full-text.jpg
new file mode 100644
index 000000000..89e09d54a
Binary files /dev/null and b/web/public/models/knowledge-full-text.jpg differ