feat(chat_history): add model_name

This commit is contained in:
aries_ckt 2023-09-11 17:59:25 +08:00
parent 83f2a8d68e
commit b32e0396e9
4 changed files with 13 additions and 4 deletions

View File

@ -259,8 +259,9 @@ def get_hist_messages(conv_uid: str):
history_messages: List[OnceConversation] = history_mem.get_messages() history_messages: List[OnceConversation] = history_mem.get_messages()
if history_messages: if history_messages:
for once in history_messages: for once in history_messages:
model_name = once.get("model_name", CFG.LLM_MODEL)
once_message_vos = [ once_message_vos = [
message2Vo(element, once["chat_order"]) for element in once["messages"] message2Vo(element, once["chat_order"], model_name) for element in once["messages"]
] ]
message_vos.extend(once_message_vos) message_vos.extend(once_message_vos)
return message_vos return message_vos
@ -381,7 +382,7 @@ async def stream_generator(chat):
chat.memory.append(chat.current_message) chat.memory.append(chat.current_message)
def message2Vo(message: dict, order) -> MessageVo: def message2Vo(message: dict, order, model_name) -> MessageVo:
return MessageVo( return MessageVo(
role=message["type"], context=message["data"]["content"], order=order role=message["type"], context=message["data"]["content"], order=order, model_name=model_name
) )

View File

@ -78,3 +78,8 @@ class MessageVo(BaseModel):
time the current message was sent time the current message was sent
""" """
time_stamp: Any = None time_stamp: Any = None
"""
model_name
"""
model_name: str

View File

@ -38,7 +38,7 @@ class BaseChat(ABC):
self.chat_session_id = chat_param["chat_session_id"] self.chat_session_id = chat_param["chat_session_id"]
self.chat_mode = chat_param["chat_mode"] self.chat_mode = chat_param["chat_mode"]
self.current_user_input: str = chat_param["current_user_input"] self.current_user_input: str = chat_param["current_user_input"]
self.llm_model = chat_param["model_name"] self.llm_model = chat_param["model_name"] if chat_param["model_name"] else CFG.LLM_MODEL
self.llm_echo = False self.llm_echo = False
### load prompt template ### load prompt template

View File

@ -23,6 +23,7 @@ class OnceConversation:
self.messages: List[BaseMessage] = [] self.messages: List[BaseMessage] = []
self.start_date: str = "" self.start_date: str = ""
self.chat_order: int = 0 self.chat_order: int = 0
self.model_name: str = ""
self.param_type: str = "" self.param_type: str = ""
self.param_value: str = "" self.param_value: str = ""
self.cost: int = 0 self.cost: int = 0
@ -104,6 +105,7 @@ def _conversation_to_dic(once: OnceConversation) -> dict:
return { return {
"chat_mode": once.chat_mode, "chat_mode": once.chat_mode,
"model_name": once.model_name if once.model_name else "proxyllm",
"chat_order": once.chat_order, "chat_order": once.chat_order,
"start_date": start_str, "start_date": start_str,
"cost": once.cost if once.cost else 0, "cost": once.cost if once.cost else 0,
@ -127,6 +129,7 @@ def conversation_from_dict(once: dict) -> OnceConversation:
conversation.chat_order = int(once.get("chat_order")) conversation.chat_order = int(once.get("chat_order"))
conversation.param_type = once.get("param_type", "") conversation.param_type = once.get("param_type", "")
conversation.param_value = once.get("param_value", "") conversation.param_value = once.get("param_value", "")
conversation.model_name = once.get("model_name", "proxyllm")
print(once.get("messages")) print(once.get("messages"))
conversation.messages = messages_from_dict(once.get("messages", [])) conversation.messages = messages_from_dict(once.get("messages", []))
return conversation return conversation