Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-08-16 15:31:19 +00:00)
fix:llm select add model_name

commit 1daaf23e70
parent 8320dc3cef
@@ -149,6 +149,7 @@ async def dialogue_list(user_id: str = None):
         conv_uid = item.get("conv_uid")
         summary = item.get("summary")
         chat_mode = item.get("chat_mode")
+        model_name = item.get("model_name", CFG.LLM_MODEL)

         messages = json.loads(item.get("messages"))
         last_round = max(messages, key=lambda x: x["chat_order"])
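The added dialogue_list line relies on dict.get with a default: conversation records stored before this change have no "model_name" key, so they quietly fall back to the configured CFG.LLM_MODEL. A minimal, self-contained sketch of that behaviour (the CFG stub and the model names are placeholders, not the project's configuration):

```python
# Minimal sketch, not the project's code: how the .get fallback behaves for
# records saved before model_name existed. CFG/LLM_MODEL are stand-ins here.
class CFG:
    LLM_MODEL = "vicuna-13b"  # assumed default; the real value comes from config

old_item = {"conv_uid": "abc", "summary": "hi", "chat_mode": "chat_normal"}
new_item = {**old_item, "model_name": "chatglm-6b"}

print(old_item.get("model_name", CFG.LLM_MODEL))  # -> vicuna-13b (fallback)
print(new_item.get("model_name", CFG.LLM_MODEL))  # -> chatglm-6b (stored value)
```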
@@ -160,6 +161,7 @@ async def dialogue_list(user_id: str = None):
             conv_uid=conv_uid,
             user_input=summary,
             chat_mode=chat_mode,
+            model_name=model_name,
             select_param=select_param,
         )
         dialogues.append(conv_vo)
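With model_name resolved, it is now passed into the ConversationVo returned to the UI, so the front end can show and restore the model a past conversation used. The real ConversationVo is defined elsewhere in the repo; the dataclass below is only a hypothetical stand-in to illustrate the extra field being carried:

```python
# Hypothetical, simplified stand-in for ConversationVo (the real class lives
# elsewhere in the repository); it only shows the extra field this commit adds.
from dataclasses import dataclass

@dataclass
class ConversationVo:
    conv_uid: str
    user_input: str
    chat_mode: str
    model_name: str          # newly threaded through by this commit
    select_param: str = ""

conv_vo = ConversationVo(
    conv_uid="abc",
    user_input="show all users",
    chat_mode="chat_with_db_execute",
    model_name="chatglm-6b",
)
print(conv_vo)
```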
@@ -259,6 +261,7 @@ def get_hist_messages(conv_uid: str):
     history_messages: List[OnceConversation] = history_mem.get_messages()
     if history_messages:
         for once in history_messages:
+            print(f"once:{once}")
             model_name = once.get("model_name", CFG.LLM_MODEL)
             once_message_vos = [
                 message2Vo(element, once["chat_order"], model_name) for element in once["messages"]
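In get_hist_messages the stored model name is resolved per conversation round (again falling back to CFG.LLM_MODEL) and handed to message2Vo for every message in that round. A rough sketch of that mapping, using hypothetical stubs for message2Vo and the stored record purely to show the call shape:

```python
# Hypothetical stubs; only the call shape mirrors the hunk above.
DEFAULT_MODEL = "vicuna-13b"  # stand-in for CFG.LLM_MODEL

def message2Vo(message: dict, order: int, model_name: str) -> dict:
    # Assumed message fields, for illustration only.
    return {"role": message["type"], "context": message["data"],
            "order": order, "model_name": model_name}

once = {
    "chat_order": 2,
    "model_name": "chatglm-6b",
    "messages": [{"type": "human", "data": "hello"},
                 {"type": "ai", "data": "hi there"}],
}

model_name = once.get("model_name", DEFAULT_MODEL)
once_message_vos = [
    message2Vo(element, once["chat_order"], model_name) for element in once["messages"]
]
print(once_message_vos)
```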
@@ -59,6 +59,7 @@ class BaseChat(ABC):

         self.history_message: List[OnceConversation] = self.memory.messages()
         self.current_message: OnceConversation = OnceConversation(self.chat_mode.value())
+        self.current_message.model_name = self.llm_model
         if chat_param["select_param"]:
             if len(self.chat_mode.param_types()) > 0:
                 self.current_message.param_type = self.chat_mode.param_types()[0]
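The BaseChat change records the selected model on the in-flight conversation as soon as the chat is constructed, so every conversation written from now on carries its model name. A simplified sketch with stand-in classes, not the repo's OnceConversation or BaseChat:

```python
# Simplified sketch (stand-in classes): recording the selected model on the
# conversation at chat-construction time, as the hunk above does.
class OnceConversation:
    def __init__(self, chat_mode: str):
        self.chat_mode = chat_mode
        self.model_name = None      # now filled in right after construction
        self.messages = []

class BaseChatSketch:
    def __init__(self, chat_mode: str, llm_model: str):
        self.llm_model = llm_model
        self.current_message = OnceConversation(chat_mode)
        self.current_message.model_name = self.llm_model  # the added line

chat = BaseChatSketch("chat_normal", "chatglm-6b")
print(chat.current_message.model_name)  # -> chatglm-6b
```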
@@ -105,7 +105,7 @@ def _conversation_to_dic(once: OnceConversation) -> dict:

     return {
         "chat_mode": once.chat_mode,
-        "model_name": once.model_name if once.model_name else "proxyllm",
+        "model_name": once.model_name,
         "chat_order": once.chat_order,
         "start_date": start_str,
         "cost": once.cost if once.cost else 0,
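Since new conversations now carry a model name from construction, the serializer can write the value straight through instead of substituting the hard-coded "proxyllm" fallback. A minimal sketch of the resulting record shape, assuming a simplified conversation object; the start_date formatting here is an assumption, not the repo's exact code:

```python
# Minimal sketch with a stub conversation object; it only shows the shape of
# the serialized record with model_name carried straight through.
from datetime import datetime

class OnceConversationStub:
    chat_mode = "chat_normal"
    model_name = "chatglm-6b"
    chat_order = 1
    cost = 0
    start_date = datetime(2023, 8, 1, 12, 0, 0)

def conversation_to_dict_sketch(once) -> dict:
    start_str = once.start_date.strftime("%Y-%m-%d %H:%M:%S")  # assumed format
    return {
        "chat_mode": once.chat_mode,
        "model_name": once.model_name,
        "chat_order": once.chat_order,
        "start_date": start_str,
        "cost": once.cost if once.cost else 0,
    }

print(conversation_to_dict_sketch(OnceConversationStub()))
```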