mirror of
https://github.com/csunny/DB-GPT.git
synced 2025-08-01 16:18:27 +00:00
style: delete unused commented-out code from milvus_store.py
This commit is contained in:
commit
fec82a7ee7
@ -158,7 +158,7 @@ class LLMModelAdaper:
|
||||
else:
|
||||
raise ValueError(f"Unknown role: {role}")
|
||||
|
||||
can_use_systems:[] = []
|
||||
can_use_systems: [] = []
|
||||
if system_messages:
|
||||
if len(system_messages) > 1:
|
||||
## Compatible with dbgpt complex scenarios, the last system will protect more complete information entered by the current user
|
||||
@ -166,7 +166,7 @@ class LLMModelAdaper:
|
||||
can_use_systems = system_messages[:-1]
|
||||
else:
|
||||
can_use_systems = system_messages
|
||||
for i in range(len(user_messages)):
|
||||
for i in range(len(user_messages)):
|
||||
# TODO vicuna 兼容 测试完放弃
|
||||
user_messages[-1] = system_messages[-1]
|
||||
if len(system_messages) > 1:
|
||||
|
@ -58,42 +58,6 @@ def _initialize_openai(params: ProxyModelParameters):
|
||||
return openai_params
|
||||
|
||||
|
||||
def __convert_2_gpt_messages(messages: List[ModelMessage]):
    """Convert internal ``ModelMessage`` history into OpenAI chat-format messages.

    Walks the conversation history and emits ``{"role": ..., "content": ...}``
    dicts: each AI reply is paired with the most recent human message before it,
    and a final "user" message is appended from the trailing human/system input.

    Args:
        messages (List[ModelMessage]): Ordered conversation history. Must be
            non-empty — the tail-message logic indexes ``messages[-1]``.

    Returns:
        tuple: ``(gpt_messages, system_messages)`` where ``gpt_messages`` is a
        list of OpenAI-style message dicts and ``system_messages`` is the list
        of raw system-message contents encountered.
    """
    gpt_messages = []

    last_usr_message = ""
    system_messages = []

    for message in messages:
        if message.role == ModelMessageRoleType.HUMAN:
            last_usr_message = message.content
        elif message.role == ModelMessageRoleType.SYSTEM:
            system_messages.append(message.content)
        elif message.role == ModelMessageRoleType.AI:
            # Pair each assistant reply with the human message that preceded it.
            # NOTE(review): if an AI message appears before any HUMAN message,
            # the paired user content is "" — presumably callers never do this;
            # confirm against upstream message construction.
            last_ai_message = message.content
            gpt_messages.append({"role": "user", "content": last_usr_message})
            gpt_messages.append({"role": "assistant", "content": last_ai_message})

    # Build the last user message.
    if len(system_messages) > 0:
        if len(system_messages) > 1:
            # Multiple system messages: the last one carries the complete
            # current-user input, so it is used verbatim as the final turn.
            end_message = system_messages[-1]
        else:
            last_message = messages[-1]
            if last_message.role == ModelMessageRoleType.HUMAN:
                # Single system message followed by a trailing human message:
                # merge them so the model sees both.
                end_message = system_messages[-1] + "\n" + last_message.content
            else:
                end_message = system_messages[-1]
    else:
        last_message = messages[-1]
        end_message = last_message.content
    gpt_messages.append({"role": "user", "content": end_message})
    return gpt_messages, system_messages
|
||||
|
||||
|
||||
def _initialize_openai_v1(params: ProxyModelParameters):
|
||||
try:
|
||||
from openai import OpenAI
|
||||
|
@ -168,61 +168,6 @@ class MilvusStore(VectorStoreBase):
|
||||
|
||||
return ids
|
||||
|
||||
# def init_schema(self) -> None:
|
||||
# """Initialize collection in milvus database."""
|
||||
# fields = [
|
||||
# FieldSchema(name="pk", dtype=DataType.INT64, is_primary=True, auto_id=True),
|
||||
# FieldSchema(name="vector", dtype=DataType.FLOAT_VECTOR, dim=self.model_config["dim"]),
|
||||
# FieldSchema(name="raw_text", dtype=DataType.VARCHAR, max_length=65535),
|
||||
# ]
|
||||
#
|
||||
# # create collection if not exist and load it.
|
||||
# self.schema = CollectionSchema(fields, "db-gpt memory storage")
|
||||
# self.collection = Collection(self.collection_name, self.schema)
|
||||
# self.index_params_map = {
|
||||
# "IVF_FLAT": {"params": {"nprobe": 10}},
|
||||
# "IVF_SQ8": {"params": {"nprobe": 10}},
|
||||
# "IVF_PQ": {"params": {"nprobe": 10}},
|
||||
# "HNSW": {"params": {"ef": 10}},
|
||||
# "RHNSW_FLAT": {"params": {"ef": 10}},
|
||||
# "RHNSW_SQ": {"params": {"ef": 10}},
|
||||
# "RHNSW_PQ": {"params": {"ef": 10}},
|
||||
# "IVF_HNSW": {"params": {"nprobe": 10, "ef": 10}},
|
||||
# "ANNOY": {"params": {"search_k": 10}},
|
||||
# }
|
||||
#
|
||||
# self.index_params = {
|
||||
# "metric_type": "IP",
|
||||
# "index_type": "HNSW",
|
||||
# "params": {"M": 8, "efConstruction": 64},
|
||||
# }
|
||||
# # create index if not exist.
|
||||
# if not self.collection.has_index():
|
||||
# self.collection.release()
|
||||
# self.collection.create_index(
|
||||
# "vector",
|
||||
# self.index_params,
|
||||
# index_name="vector",
|
||||
# )
|
||||
# info = self.collection.describe()
|
||||
# self.collection.load()
|
||||
|
||||
# def insert(self, text, model_config) -> str:
|
||||
# """Add an embedding of data into milvus.
|
||||
# Args:
|
||||
# text (str): The raw text to construct embedding index.
|
||||
# Returns:
|
||||
# str: log.
|
||||
# """
|
||||
# # embedding = get_ada_embedding(data)
|
||||
# embeddings = HuggingFaceEmbeddings(model_name=self.model_config["model_name"])
|
||||
# result = self.collection.insert([embeddings.embed_documents(text), text])
|
||||
# _text = (
|
||||
# "Inserting data into memory at primary key: "
|
||||
# f"{result.primary_keys[0]}:\n data: {text}"
|
||||
# )
|
||||
# return _text
|
||||
|
||||
def _add_documents(
|
||||
self,
|
||||
texts: Iterable[str],
|
||||
|
Loading…
Reference in New Issue
Block a user