Bug fixes on chat history filter plus removed system prompt

This commit is contained in:
Saurab-Shrestha9639*969**9858//852 2024-04-08 17:59:01 +05:45
parent fb64e15802
commit 7dab3edebf
9 changed files with 21 additions and 21 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -163,7 +163,9 @@ class ChatService:
else None
)
system_prompt = (
"You can only answer questions about the provided context. If you know the answer but it is not based in the provided context, don't provide the answer, just state the answer is not in the context provided."
chat_engine_input.system_message.content
if chat_engine_input.system_message
else None
)
chat_history = (
chat_engine_input.chat_history if chat_engine_input.chat_history else None

View File

@@ -145,7 +145,6 @@ async def prompt_completion(
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
detail=f"No documents uploaded for your department.")
docs_list = [document.filename for document in documents]
print("DOCUMENTS ASSIGNED TO THIS DEPARTMENTS: ", docs_list)
docs_ids = []
for filename in docs_list:
doc_id = service.get_doc_ids_by_filename(filename)
@@ -165,7 +164,7 @@ async def prompt_completion(
user_message_json = {
'text': body.prompt,
}
create_chat_item(db, "user", json.dumps(user_message_json) , body.conversation_id)
create_chat_item(db, "user", user_message_json , body.conversation_id)
messages = [user_message]
@@ -192,22 +191,9 @@ async def prompt_completion(
chat_response = await chat_completion(request, chat_body)
ai_response = chat_response.model_dump(mode="json")
text_list = [choice['message']['content'] for choice in ai_response['choices']]
sources_list = [source['document']['doc_metadata'] for choice in ai_response['choices'] for source in choice['sources']]
ai_response_json = {
'text': text_list[0],
'sources': sources_list
}
ai_response_json_str = json.dumps(ai_response_json)
print("The ai response: ",ai_response_json_str)
create_chat_item(db, "assistant", ai_response, body.conversation_id)
return chat_response
except Exception as e:
print(traceback.format_exc())
logger.error(f"There was an error: {str(e)}")

View File

@@ -25,7 +25,7 @@ def list_chat_histories(
Retrieve a list of chat histories with pagination support.
"""
try:
chat_histories = crud.chat.get_multi(
chat_histories = crud.chat.get_chat_history(
db, skip=skip, limit=limit)
return chat_histories
except Exception as e:

View File

@@ -1,3 +1,4 @@
from sqlalchemy.sql.expression import desc, asc
from typing import Optional, List, Union, Dict, Any
from fastapi import HTTPException, status
from sqlalchemy.orm import Session
@@ -12,6 +13,16 @@ class CRUDChat(CRUDBase[ChatHistory, ChatHistoryCreate, ChatHistoryCreate]):
def get_by_id(self, db: Session, *, id: uuid.UUID) -> Optional[ChatHistory]:
return db.query(self.model).filter(ChatHistory.conversation_id == id).first()
def get_chat_history(
self, db: Session, *, skip: int = 0, limit: int = 100
) -> List[ChatHistory]:
return (
db.query(self.model)
.order_by(desc(getattr(ChatHistory, 'created_at')))
.offset(skip)
.limit(limit)
.all()
)
class CRUDChatItem(CRUDBase[ChatItem, ChatItemCreate, ChatItemUpdate]):
pass

View File

@@ -35,9 +35,10 @@ class CRUDDocuments(CRUDBase[Document, DocumentCreate, DocumentUpdate]):
.join(document_department_association)
.join(Department)
.filter(document_department_association.c.department_id == department_id)
.order_by(desc(getattr(Document, 'uploaded_at')))
.offset(skip)
.limit(limit)
.all().order_by(desc(getattr(Document, 'uploaded_at')))
.all()
)
def get_files_to_verify(

View File

@@ -59,7 +59,7 @@ embedding:
ingest_mode: simple
huggingface:
embedding_hf_model_name: BAAI/bge-large-en-v1.5
embedding_hf_model_name: mixedbread-ai/mxbai-embed-large-v1
vectorstore:
database: qdrant