Mirror of https://github.com/csunny/DB-GPT.git, synced 2025-08-11 13:12:18 +00:00
fix: custom chat exception (#257)
Fix the custom chat exception handling so that the error raised is one the user can understand.

Commit 31e7ea8df9
@@ -1,3 +1,5 @@
+from chromadb.errors import NoIndexException
+
 from pilot.scene.base_chat import BaseChat, logger, headers
 from pilot.scene.base import ChatScene
 from pilot.common.sql_database import Database
@@ -50,13 +52,19 @@ class ChatNewKnowledge(BaseChat):
         )
 
     def generate_input_values(self):
-        docs = self.knowledge_embedding_client.similar_search(
-            self.current_user_input, CFG.KNOWLEDGE_SEARCH_TOP_SIZE
-        )
-        context = [d.page_content for d in docs]
-        self.metadata = [d.metadata for d in docs]
-        context = context[:2000]
-        input_values = {"context": context, "question": self.current_user_input}
+        try:
+            docs = self.knowledge_embedding_client.similar_search(
+                self.current_user_input, CFG.KNOWLEDGE_SEARCH_TOP_SIZE
+            )
+            context = [d.page_content for d in docs]
+            self.metadata = [d.metadata for d in docs]
+            context = context[:2000]
+            input_values = {"context": context, "question": self.current_user_input}
+        except NoIndexException:
+            raise ValueError(
+                f"you have no {self.knowledge_name} knowledge store, please upload your knowledge"
+            )
+
         return input_values
 
     def do_with_prompt_response(self, prompt_response):
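The change above is exception translation: chromadb's NoIndexException, raised when the vector store has no index yet (nothing has been embedded), is caught at the chat-scene boundary and re-raised as a ValueError whose message names the missing knowledge store and tells the user what to do next, instead of surfacing a bare index error. Below is a minimal standalone sketch of that pattern, not DB-GPT's actual code: NoIndexException is stubbed locally so the snippet runs without chromadb installed, and FakeKnowledgeClient is a hypothetical stand-in for the real knowledge_embedding_client.

class NoIndexException(Exception):
    """Local stand-in for chromadb.errors.NoIndexException, so this sketch
    runs without chromadb installed."""


class FakeKnowledgeClient:
    """Hypothetical stand-in for the real knowledge_embedding_client."""

    def similar_search(self, query, top_k):
        # Simulate a vector store that was never populated.
        raise NoIndexException("index not found")


def generate_input_values(client, knowledge_name, user_input, top_k=5):
    """Same try/except shape as the method changed in this commit."""
    try:
        docs = client.similar_search(user_input, top_k)
        context = [d.page_content for d in docs][:2000]
        return {"context": context, "question": user_input}
    except NoIndexException:
        # Translate the low-level error into a message the user can act on.
        raise ValueError(
            f"you have no {knowledge_name} knowledge store, please upload your knowledge"
        )


if __name__ == "__main__":
    try:
        generate_input_values(FakeKnowledgeClient(), "my_docs", "what is DB-GPT?")
    except ValueError as err:
        print(err)  # you have no my_docs knowledge store, please upload your knowledge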
@@ -25,7 +25,6 @@ from pilot.configs.config import Config
 from pilot.configs.model_config import (
     DATASETS_DIR,
     KNOWLEDGE_UPLOAD_ROOT_PATH,
-    LLM_MODEL_CONFIG,
     LOGDIR,
 )
 
@@ -632,7 +631,7 @@ def knowledge_embedding_store(vs_id, files):
     )
     knowledge_embedding_client = KnowledgeEmbedding(
         file_path=os.path.join(KNOWLEDGE_UPLOAD_ROOT_PATH, vs_id, filename),
-        model_name=LLM_MODEL_CONFIG["text2vec"],
+        model_name=CFG.EMBEDDING_MODEL,
         vector_store_config={
             "vector_store_name": vector_store_name["vs_name"],
             "vector_store_path": KNOWLEDGE_UPLOAD_ROOT_PATH,
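The second hunk stops hard-coding the embedding model: instead of LLM_MODEL_CONFIG["text2vec"], knowledge_embedding_store now passes CFG.EMBEDDING_MODEL, so whichever embedding model the global configuration selects is used when building the vector store. A rough sketch of the call after the change is below; KnowledgeEmbedding is stubbed so the snippet runs standalone, only the keyword arguments visible in this hunk are used, and the Config stub reading EMBEDDING_MODEL from an environment variable with a text2vec default is an assumption for illustration.

import os

KNOWLEDGE_UPLOAD_ROOT_PATH = "/tmp/knowledge_upload"  # hypothetical path for the sketch


class Config:
    def __init__(self):
        # Assumption: the embedding model is chosen by configuration
        # (e.g. an environment variable), not fixed at the call site.
        self.EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "text2vec")


class KnowledgeEmbedding:
    """Stub standing in for DB-GPT's KnowledgeEmbedding, limited to the
    keyword arguments that appear in the hunk above."""

    def __init__(self, file_path, model_name, vector_store_config):
        self.file_path = file_path
        self.model_name = model_name
        self.vector_store_config = vector_store_config


CFG = Config()
vs_id, filename, vs_name = "vs_001", "report.pdf", "my_docs"  # hypothetical inputs

knowledge_embedding_client = KnowledgeEmbedding(
    file_path=os.path.join(KNOWLEDGE_UPLOAD_ROOT_PATH, vs_id, filename),
    model_name=CFG.EMBEDDING_MODEL,  # was LLM_MODEL_CONFIG["text2vec"]
    vector_store_config={
        "vector_store_name": vs_name,
        "vector_store_path": KNOWLEDGE_UPLOAD_ROOT_PATH,
    },
)
print(knowledge_embedding_client.model_name)  # text2vec unless EMBEDDING_MODEL is set

Together with dropping LLM_MODEL_CONFIG from the import list, this makes switching embedding models a configuration change rather than an edit to the webserver code.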