feat: add storage client cache (#2810)

Aries-ckt 2025-07-05 19:44:47 +08:00 committed by GitHub
parent d27fdb7928
commit 1eea9c8eec


@@ -1,5 +1,6 @@
 """RAG STORAGE MANAGER manager."""
 
+import threading
 from typing import List, Optional, Type
 
 from dbgpt import BaseComponent
@@ -22,6 +23,8 @@ class StorageManager(BaseComponent):
     def __init__(self, system_app: SystemApp):
         """Create a new ConnectorManager."""
         self.system_app = system_app
+        self._store_cache = {}
+        self._cache_lock = threading.Lock()
         super().__init__(system_app)
 
     def init_app(self, system_app: SystemApp):
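
For orientation, the two new fields above back a simple get-or-create cache: a plain dict keyed by index name, with a threading.Lock serializing creation. Below is a minimal standalone sketch of that pattern; the StoreCache class and the factory callback are illustrative stand-ins, not DB-GPT code, and this sketch additionally re-checks the key inside the lock, which the diff itself does not do.

    import threading
    from typing import Any, Callable, Dict


    class StoreCache:
        """Illustrative stand-in for the _store_cache/_cache_lock pair."""

        def __init__(self) -> None:
            self._store_cache: Dict[str, Any] = {}
            self._cache_lock = threading.Lock()

        def get_or_create(self, index_name: str, factory: Callable[[], Any]) -> Any:
            # Fast path: reuse an existing client without taking the lock.
            if index_name in self._store_cache:
                return self._store_cache[index_name]
            # Slow path: build the client while holding the lock.
            with self._cache_lock:
                if index_name not in self._store_cache:  # re-check under the lock
                    self._store_cache[index_name] = factory()
                return self._store_cache[index_name]
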
@@ -62,17 +65,22 @@ class StorageManager(BaseComponent):
         """Create vector store."""
         app_config = self.system_app.config.configs.get("app_config")
         storage_config = app_config.rag.storage
+        if index_name in self._store_cache:
+            return self._store_cache[index_name]
+        with self._cache_lock:
             embedding_factory = self.system_app.get_component(
                 "embedding_factory", EmbeddingFactory
             )
             embedding_fn = embedding_factory.create()
             vector_store_config: VectorStoreConfig = storage_config.vector
-            return vector_store_config.create_store(
+            new_store = vector_store_config.create_store(
                 name=index_name,
                 embedding_fn=embedding_fn,
                 max_chunks_once_load=vector_store_config.max_chunks_once_load,
                 max_threads=vector_store_config.max_threads,
             )
+            self._store_cache[index_name] = new_store
+            return new_store
 
     def create_kg_store(
         self, index_name, llm_model: Optional[str] = None
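
The effect of the change to the vector-store factory above is that the client for a given index name is built once and then reused. The runnable sketch below shows the shape of the pattern under concurrent callers; expensive_create is a stand-in for vector_store_config.create_store, not a DB-GPT API, and, like the earlier sketch, it re-checks the key while holding the lock.

    import threading

    CREATE_CALLS = 0
    _cache: dict = {}
    _lock = threading.Lock()


    def expensive_create(name: str) -> dict:
        """Stand-in for vector_store_config.create_store(...); counts how often it runs."""
        global CREATE_CALLS
        CREATE_CALLS += 1
        return {"index": name}


    def get_store(name: str) -> dict:
        # Same shape as the diff: check the cache first, then create under the lock.
        if name in _cache:
            return _cache[name]
        with _lock:
            if name not in _cache:  # re-check so only one thread creates
                _cache[name] = expensive_create(name)
            return _cache[name]


    threads = [threading.Thread(target=get_store, args=("docs",)) for _ in range(8)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert CREATE_CALLS == 1
    assert get_store("docs") is get_store("docs")
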
@@ -81,6 +89,9 @@ class StorageManager(BaseComponent):
         app_config = self.system_app.config.configs.get("app_config")
         rag_config = app_config.rag
         storage_config = app_config.rag.storage
+        if index_name in self._store_cache:
+            return self._store_cache[index_name]
+        with self._cache_lock:
             worker_manager = self.system_app.get_component(
                 ComponentType.WORKER_MANAGER_FACTORY, WorkerManagerFactory
             ).create()
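
Note that, as far as these hunks show, the vector, knowledge-graph, and full-text factories all consult the same _store_cache keyed only by index_name, so whichever store type is created first for a given name is what later lookups return. If distinct store types ever needed to coexist under one index name, a composite key would be one option; the lines below are a hypothetical variant, not what the commit does.

    from typing import Any, Dict, Tuple

    # Hypothetical variant: key the shared cache by (store_type, index_name)
    # so a vector store and a knowledge graph store can coexist per name.
    store_cache: Dict[Tuple[str, str], Any] = {}
    cache_key = ("vector", "my_index")
    store = store_cache.get(cache_key)  # None until a store is created and inserted
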
@@ -92,7 +103,10 @@ class StorageManager(BaseComponent):
             if storage_config.graph:
                 graph_config = storage_config.graph
                 graph_config.llm_model = llm_model
-                if hasattr(graph_config, "enable_summary") and graph_config.enable_summary:
+                if (
+                    hasattr(graph_config, "enable_summary")
+                    and graph_config.enable_summary
+                ):
                     from dbgpt_ext.storage.knowledge_graph.community_summary import (
                         CommunitySummaryKnowledgeGraph,
                     )
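
The reflowed condition above is the pre-existing enable_summary check, presumably rewrapped for line length after the block moved under the lock. An equivalent, slightly more compact form uses getattr with a default; this is only a sketch, and the commit keeps the hasattr form.

    from types import SimpleNamespace

    graph_config = SimpleNamespace(enable_summary=True)  # stand-in config object

    # Equivalent branch to `hasattr(cfg, "enable_summary") and cfg.enable_summary`:
    # a missing attribute is simply treated as "summary disabled".
    if getattr(graph_config, "enable_summary", False):
        print("community summary knowledge graph selected")
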
@@ -132,6 +146,9 @@ class StorageManager(BaseComponent):
         app_config = self.system_app.config.configs.get("app_config")
         rag_config = app_config.rag
         storage_config = app_config.rag.storage
+        if index_name in self._store_cache:
+            return self._store_cache[index_name]
+        with self._cache_lock:
             return ElasticDocumentStore(
                 es_config=storage_config.full_text,
                 name=index_name,
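
Taken together, the hunks give each store factory the same look-up-then-lock shape (this last hunk is truncated, so whether the Elasticsearch client is also inserted into _store_cache is not visible here). A hypothetical caller-side view follows; the SystemApp wiring that produces storage_manager, and the create_vector_store method name, are assumptions based on the identifiers visible in this diff.

    # Hypothetical usage; the wiring that initializes `storage_manager`
    # (a StorageManager bound to a SystemApp) is assumed and not shown.
    store_a = storage_manager.create_vector_store("knowledge_space")
    store_b = storage_manager.create_vector_store("knowledge_space")
    assert store_a is store_b  # the second call is served from _store_cache

    # create_kg_store is cached the same way; llm_model defaults to None
    # per the signature visible in the diff.
    kg_store = storage_manager.create_kg_store("knowledge_space_kg")
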