Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-08-01 16:18:27 +00:00)

style:fmt

commit a7ec3400fe
parent 84fbebda4c
@@ -165,7 +165,9 @@ class Config(metaclass=Singleton):
         self.KNOWLEDGE_CHUNK_SIZE = int(os.getenv("KNOWLEDGE_CHUNK_SIZE", 100))
         self.KNOWLEDGE_CHUNK_OVERLAP = int(os.getenv("KNOWLEDGE_CHUNK_OVERLAP", 100))
         self.KNOWLEDGE_SEARCH_TOP_SIZE = int(os.getenv("KNOWLEDGE_SEARCH_TOP_SIZE", 5))
-        self.KNOWLEDGE_SEARCH_MAX_TOKEN = int(os.getenv("KNOWLEDGE_SEARCH_MAX_TOKEN", 2000))
+        self.KNOWLEDGE_SEARCH_MAX_TOKEN = int(
+            os.getenv("KNOWLEDGE_SEARCH_MAX_TOKEN", 2000)
+        )
         ### SUMMARY_CONFIG Configuration
         self.SUMMARY_CONFIG = os.getenv("SUMMARY_CONFIG", "FAST")
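The only change in this hunk is wrapping the KNOWLEDGE_SEARCH_MAX_TOKEN assignment across three lines to satisfy the formatter; the value is still read the same way. A minimal, self-contained sketch of how these environment-driven defaults behave (the variable names come from the diff; the override shown in the comment is only illustrative):

import os

# Defaults mirror the ones shown in Config; export a variable before start-up
# to override it, e.g. `export KNOWLEDGE_CHUNK_SIZE=500`.
chunk_size = int(os.getenv("KNOWLEDGE_CHUNK_SIZE", 100))
chunk_overlap = int(os.getenv("KNOWLEDGE_CHUNK_OVERLAP", 100))
search_top_k = int(os.getenv("KNOWLEDGE_SEARCH_TOP_SIZE", 5))
search_max_token = int(
    os.getenv("KNOWLEDGE_SEARCH_MAX_TOKEN", 2000)
)

# Prints 100 100 5 2000 when none of the variables are set.
print(chunk_size, chunk_overlap, search_top_k, search_max_token)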
@@ -12,7 +12,7 @@ from pilot.scene.chat_normal.out_parser import NormalChatOutputParser
 CFG = Config()
 
 PROMPT_SCENE_DEFINE = """A chat between a curious user and an artificial intelligence assistant, who very familiar with database related knowledge.
-The assistant gives helpful, detailed, professional and polite answers to the user's questions. """
+The assistant gives helpful, detailed, professional and polite answers to the user's questions. """
 
 
 _DEFAULT_TEMPLATE_ZH = """ 基于以下已知的信息, 专业、简要的回答用户的问题,
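The removed and added prompt lines above differ only in trailing whitespace, as expected for a style:fmt commit. For readers unfamiliar with how such scene prompts are used, here is a sketch of stitching a scene definition and a knowledge template into one prompt; the build_prompt helper and the {context}/{question} placeholders are assumptions for illustration, not the project's actual API.

# Scene definition quoted from the diff above.
PROMPT_SCENE_DEFINE = (
    "A chat between a curious user and an artificial intelligence assistant, "
    "who very familiar with database related knowledge. "
    "The assistant gives helpful, detailed, professional and polite answers "
    "to the user's questions. "
)

# Hypothetical English counterpart of _DEFAULT_TEMPLATE_ZH, whose opening line
# translates roughly as "Based on the following known information, answer the
# user's question professionally and concisely".
_TEMPLATE = (
    "Based on the following known information, answer the user's question "
    "professionally and concisely.\n"
    "Known information:\n{context}\n"
    "Question: {question}"
)


def build_prompt(context: str, question: str) -> str:
    """Assumed helper: prepend the scene definition to the filled-in template."""
    return PROMPT_SCENE_DEFINE + "\n" + _TEMPLATE.format(context=context, question=question)


print(build_prompt("PostgreSQL keeps old row versions for MVCC.", "What is MVCC?"))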
@@ -188,7 +188,7 @@ class KnowledgeService:
             text_splitter = SpacyTextSplitter(
                 pipeline="zh_core_web_sm",
                 chunk_size=chunk_size,
-                chunk_overlap=chunk_overlap
+                chunk_overlap=chunk_overlap,
             )
         except Exception:
             text_splitter = RecursiveCharacterTextSplitter(
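Only a trailing comma is added here, but the surrounding pattern is worth spelling out: build a sentence-aware Chinese splitter first, and fall back to character-based splitting when the spaCy model cannot be loaded. A standalone sketch of that pattern, assuming the LangChain text splitters this file imports; the chunk sizes stand in for the CFG values.

from langchain.text_splitter import RecursiveCharacterTextSplitter, SpacyTextSplitter

chunk_size, chunk_overlap = 100, 100  # normally CFG.KNOWLEDGE_CHUNK_SIZE / KNOWLEDGE_CHUNK_OVERLAP

try:
    # Sentence-aware splitting via the Chinese spaCy pipeline; constructing the
    # splitter raises if zh_core_web_sm (or spaCy itself) is not installed.
    text_splitter = SpacyTextSplitter(
        pipeline="zh_core_web_sm",
        chunk_size=chunk_size,
        chunk_overlap=chunk_overlap,
    )
except Exception:
    # Fallback: plain recursive character splitting, no language model needed.
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=chunk_size,
        chunk_overlap=chunk_overlap,
    )

chunks = text_splitter.split_text("A long knowledge document ... " * 50)
print(len(chunks), "chunks")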
@@ -26,10 +26,10 @@ class ChromaStore(VectorStoreBase):
             persist_directory=self.persist_dir,
             embedding_function=self.embeddings,
             client_settings=chroma_settings,
-            collection_metadata=collection_metadata
+            collection_metadata=collection_metadata,
         )
 
-    def similar_search(self, text, topk, **kwargs: Any) -> None:
+    def similar_search(self, text, topk, **kwargs: Any) -> None:
         logger.info("ChromaStore similar search")
         return self.vector_store_client.similarity_search(text, topk)
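similar_search forwards to LangChain's Chroma.similarity_search and returns the top-k matching documents (despite the -> None annotation kept from the original code). A rough usage sketch, assuming LangChain's Chroma wrapper and a HuggingFace embedding model; the model name, collection name, and persist directory are placeholders, not DB-GPT's actual wiring.

from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Chroma

# Placeholder embedding model and storage path.
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
store = Chroma(
    collection_name="knowledge_demo",
    embedding_function=embeddings,
    persist_directory="/tmp/chroma_demo",
)

store.add_texts([
    "MVCC keeps multiple versions of each row.",
    "B-tree indexes speed up point lookups.",
])

# Equivalent of ChromaStore.similar_search(text, topk): top-k nearest documents.
for doc in store.similarity_search("How does MVCC work?", k=2):
    print(doc.page_content)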