mirror of https://github.com/csunny/DB-GPT.git
synced 2025-08-11 05:02:15 +00:00

style:delete milvus_store.py unusual code

commit fec82a7ee7
@@ -158,7 +158,7 @@ class LLMModelAdaper:
             else:
                 raise ValueError(f"Unknown role: {role}")
 
-        can_use_systems:[] = []
+        can_use_systems: [] = []
         if system_messages:
             if len(system_messages) > 1:
                 ## Compatible with dbgpt complex scenarios, the last system will protect more complete information entered by the current user
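Note: the change above only adds a space after the colon; `can_use_systems: []` is still an unusual annotation, since `[]` is an empty list literal rather than a type. A minimal sketch of the more conventional spelling (a suggestion only, not what this commit does):

from typing import List

# Conventional annotation for a list of system prompts (illustration only,
# not part of this commit).
can_use_systems: List[str] = []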
@@ -58,42 +58,6 @@ def _initialize_openai(params: ProxyModelParameters):
     return openai_params
 
 
-def __convert_2_gpt_messages(messages: List[ModelMessage]):
-
-    chat_round = 0
-    gpt_messages = []
-
-    last_usr_message = ""
-    system_messages = []
-
-    for message in messages:
-        if message.role == ModelMessageRoleType.HUMAN:
-            last_usr_message = message.content
-        elif message.role == ModelMessageRoleType.SYSTEM:
-            system_messages.append(message.content)
-        elif message.role == ModelMessageRoleType.AI:
-            last_ai_message = message.content
-            gpt_messages.append({"role": "user", "content": last_usr_message})
-            gpt_messages.append({"role": "assistant", "content": last_ai_message})
-
-    # build last user messge
-
-    if len(system_messages) >0:
-        if len(system_messages) > 1:
-            end_message = system_messages[-1]
-        else:
-            last_message = messages[-1]
-            if last_message.role == ModelMessageRoleType.HUMAN:
-                end_message = system_messages[-1] + "\n" + last_message.content
-            else:
-                end_message = system_messages[-1]
-    else:
-        last_message = messages[-1]
-        end_message = last_message.content
-    gpt_messages.append({"role": "user", "content": end_message})
-    return gpt_messages, system_messages
-
-
 def _initialize_openai_v1(params: ProxyModelParameters):
     try:
         from openai import OpenAI
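Note: this hunk drops the `__convert_2_gpt_messages` helper and keeps `_initialize_openai_v1`, which targets the `openai>=1.0` client imported above. A minimal sketch of how that v1 client is typically constructed and called (hypothetical key, endpoint, and model; not the repository's implementation):

from openai import OpenAI

# Hypothetical values for illustration; in DB-GPT they come from ProxyModelParameters.
client = OpenAI(api_key="sk-...", base_url="https://api.openai.com/v1")

resp = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello"},
    ],
)
print(resp.choices[0].message.content)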
@@ -168,61 +168,6 @@ class MilvusStore(VectorStoreBase):
 
         return ids
 
-    # def init_schema(self) -> None:
-    #     """Initialize collection in milvus database."""
-    #     fields = [
-    #         FieldSchema(name="pk", dtype=DataType.INT64, is_primary=True, auto_id=True),
-    #         FieldSchema(name="vector", dtype=DataType.FLOAT_VECTOR, dim=self.model_config["dim"]),
-    #         FieldSchema(name="raw_text", dtype=DataType.VARCHAR, max_length=65535),
-    #     ]
-    #
-    #     # create collection if not exist and load it.
-    #     self.schema = CollectionSchema(fields, "db-gpt memory storage")
-    #     self.collection = Collection(self.collection_name, self.schema)
-    #     self.index_params_map = {
-    #         "IVF_FLAT": {"params": {"nprobe": 10}},
-    #         "IVF_SQ8": {"params": {"nprobe": 10}},
-    #         "IVF_PQ": {"params": {"nprobe": 10}},
-    #         "HNSW": {"params": {"ef": 10}},
-    #         "RHNSW_FLAT": {"params": {"ef": 10}},
-    #         "RHNSW_SQ": {"params": {"ef": 10}},
-    #         "RHNSW_PQ": {"params": {"ef": 10}},
-    #         "IVF_HNSW": {"params": {"nprobe": 10, "ef": 10}},
-    #         "ANNOY": {"params": {"search_k": 10}},
-    #     }
-    #
-    #     self.index_params = {
-    #         "metric_type": "IP",
-    #         "index_type": "HNSW",
-    #         "params": {"M": 8, "efConstruction": 64},
-    #     }
-    #     # create index if not exist.
-    #     if not self.collection.has_index():
-    #         self.collection.release()
-    #         self.collection.create_index(
-    #             "vector",
-    #             self.index_params,
-    #             index_name="vector",
-    #         )
-    #     info = self.collection.describe()
-    #     self.collection.load()
-
-    # def insert(self, text, model_config) -> str:
-    #     """Add an embedding of data into milvus.
-    #     Args:
-    #         text (str): The raw text to construct embedding index.
-    #     Returns:
-    #         str: log.
-    #     """
-    #     # embedding = get_ada_embedding(data)
-    #     embeddings = HuggingFaceEmbeddings(model_name=self.model_config["model_name"])
-    #     result = self.collection.insert([embeddings.embed_documents(text), text])
-    #     _text = (
-    #         "Inserting data into memory at primary key: "
-    #         f"{result.primary_keys[0]}:\n data: {text}"
-    #     )
-    #     return _text
-
     def _add_documents(
         self,
         texts: Iterable[str],
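Note: the block removed above was commented-out scaffolding for creating a Milvus collection and an HNSW index. For reference, a minimal standalone sketch of that pattern with pymilvus (hypothetical connection settings, collection name, and embedding dimension; not code from this commit):

from pymilvus import Collection, CollectionSchema, DataType, FieldSchema, connections

# Hypothetical Milvus endpoint and embedding dimension, for illustration only.
connections.connect("default", host="localhost", port="19530")

fields = [
    FieldSchema(name="pk", dtype=DataType.INT64, is_primary=True, auto_id=True),
    FieldSchema(name="vector", dtype=DataType.FLOAT_VECTOR, dim=384),
    FieldSchema(name="raw_text", dtype=DataType.VARCHAR, max_length=65535),
]
schema = CollectionSchema(fields, "db-gpt memory storage")
collection = Collection("dbgpt_demo", schema)

# Create an HNSW inner-product index on the vector field, then load the collection.
if not collection.has_index():
    collection.create_index(
        "vector",
        {"metric_type": "IP", "index_type": "HNSW", "params": {"M": 8, "efConstruction": 64}},
        index_name="vector",
    )
collection.load()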
setup.py (+1)
@@ -317,6 +317,7 @@ def core_requires():
         # TODO move transformers to default
         "transformers>=4.31.0",
         "alembic==1.12.0",
+        # for excel
         "openpyxl==3.1.2",
         "chardet==5.1.0",
         "xlrd==2.0.1",
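Note: the added `# for excel` comment groups the Excel-related dependencies (openpyxl, chardet, xlrd). A minimal sketch of the kind of usage these enable (hypothetical file name; not code from this commit):

from openpyxl import load_workbook

# Hypothetical workbook path, for illustration only.
wb = load_workbook("report.xlsx", read_only=True)
ws = wb.active
for row in ws.iter_rows(min_row=1, max_row=3, values_only=True):
    print(row)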