fix(storage): Fix storage connector error (#1547)

Co-authored-by: dong <dongzhancai@iie2.com>
Co-authored-by: Zhancai Dong <dongzhancai1@jd.com>
Co-authored-by: aries_ckt <916701291@qq.com>
This commit is contained in:
Cooper 2024-05-30 22:02:18 +08:00 committed by GitHub
parent a88af6f87d
commit 04703f2b3a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 30 additions and 33 deletions

View File

@@ -109,6 +109,10 @@ class IndexStoreBase(ABC):
index_name(str): The name of index to delete.
"""
def vector_name_exists(self) -> bool:
    """Report whether the vector/index name exists in the store.

    This base implementation unconditionally reports True.
    """
    return True
def load_document_with_limit(
self, chunks: List[Chunk], max_chunks_once_load: int = 10, max_threads: int = 1
) -> List[str]:

View File

@@ -87,6 +87,9 @@ class VectorStoreConnector:
- vector_store_type: vector store type Milvus, Chroma, Weaviate
- ctx: vector store config params.
"""
if vector_store_config is None:
raise Exception("vector_store_config is required")
self._index_store_config = vector_store_config
self._register()
@@ -96,36 +99,25 @@
raise Exception(f"Vector store {vector_store_type} not supported")
logger.info(f"VectorStore:{self.connector_class}")
self._vector_store_type = vector_store_type
self._embeddings = (
vector_store_config.embedding_fn if vector_store_config else None
)
self._embeddings = vector_store_config.embedding_fn
config_dict = {}
for key in vector_store_config.to_dict().keys():
value = getattr(vector_store_config, key)
if value is not None:
config_dict[key] = value
for key, value in vector_store_config.model_extra.items():
if value is not None:
config_dict[key] = value
config = self.config_class(**config_dict)
try:
if vector_store_config is not None:
config: IndexStoreConfig = self.config_class()
config.name = getattr(vector_store_config, "name", "default_name")
config.embedding_fn = getattr(vector_store_config, "embedding_fn", None)
config.max_chunks_once_load = getattr(
vector_store_config, "max_chunks_once_load", 5
)
config.max_threads = getattr(vector_store_config, "max_threads", 4)
config.user = getattr(vector_store_config, "user", None)
config.password = getattr(vector_store_config, "password", None)
# extra
config_dict = vector_store_config.dict()
config.llm_client = config_dict.get("llm_client", None)
config.model_name = config_dict.get("model_name", None)
if (
vector_store_type in pools
and config.name in pools[vector_store_type]
):
self.client = pools[vector_store_type][config.name]
else:
client = self.connector_class(config)
pools[vector_store_type][config.name] = self.client = client
self.client = client
if vector_store_type in pools and config.name in pools[vector_store_type]:
self.client = pools[vector_store_type][config.name]
else:
client = self.connector_class(config)
pools[vector_store_type][config.name] = self.client = client
except Exception as e:
logger.error("connect vector store failed: %s", e)
raise e

View File

@@ -57,12 +57,12 @@ async def main():
# initialize client
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
async for data in client.chat_stream(
model="chatgpt_proxyllm",
messages="hello",
):
print(data)
data = await client.chat(model="chatgpt_proxyllm", messages="hello")
# async for data in client.chat_stream(
# model="chatgpt_proxyllm",
# messages="hello",
# ):
print(data)
# res = await client.chat(model="chatgpt_proxyllm" ,messages="hello")
# print(res)

View File

@@ -6,6 +6,7 @@ from dbgpt.model.proxy.llms.chatgpt import OpenAILLMClient
from dbgpt.rag import ChunkParameters
from dbgpt.rag.assembler import EmbeddingAssembler
from dbgpt.rag.knowledge import KnowledgeFactory
from dbgpt.storage.knowledge_graph.knowledge_graph import BuiltinKnowledgeGraphConfig
from dbgpt.storage.vector_store.base import VectorStoreConfig
from dbgpt.storage.vector_store.connector import VectorStoreConnector