update with new cascade

Saurab-Shrestha 2024-03-17 11:16:34 +05:45
parent de39e594f8
commit 2bcc7f589f
19 changed files with 120 additions and 70 deletions

.env
View File

@@ -4,8 +4,8 @@ ENVIRONMENT=dev
 DB_HOST=localhost
 DB_USER=postgres
 DB_PORT=5432
-DB_PASSWORD=quick
-DB_NAME=QuickGpt
+DB_PASSWORD=admin
+DB_NAME=GPT
 SUPER_ADMIN_EMAIL=superadmin@email.com
 SUPER_ADMIN_PASSWORD=supersecretpassword

View File

@@ -0,0 +1,38 @@
"""update cascade

Revision ID: 39c817e4fc4a
Revises: cb320e7880fc
Create Date: 2024-03-17 11:08:22.426368

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = '39c817e4fc4a'
down_revision: Union[str, None] = 'cb320e7880fc'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('document_department_association_document_id_fkey', 'document_department_association', type_='foreignkey')
    op.drop_constraint('document_department_association_department_id_fkey', 'document_department_association', type_='foreignkey')
    op.create_foreign_key(None, 'document_department_association', 'document', ['document_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
    op.create_foreign_key(None, 'document_department_association', 'departments', ['department_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
    # op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id', 'company_id'])
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
    op.drop_constraint(None, 'document_department_association', type_='foreignkey')
    op.drop_constraint(None, 'document_department_association', type_='foreignkey')
    op.create_foreign_key('document_department_association_department_id_fkey', 'document_department_association', 'departments', ['department_id'], ['id'])
    op.create_foreign_key('document_department_association_document_id_fkey', 'document_department_association', 'document', ['document_id'], ['id'])
    # ### end Alembic commands ###
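
A quick way to sanity-check the new cascade after running this migration: delete a document row directly and confirm its association rows disappear. This is a minimal sketch, not part of the commit; the connection values are assumed from the .env above, and the document id used here is purely hypothetical.

from sqlalchemy import create_engine, text

# Connection values assumed from the .env shown above.
engine = create_engine("postgresql://postgres:admin@localhost:5432/GPT")

with engine.begin() as conn:
    # Deleting a document row should now cascade to document_department_association.
    conn.execute(text("DELETE FROM document WHERE id = :doc_id"), {"doc_id": 1})

    remaining = conn.execute(
        text("SELECT count(*) FROM document_department_association WHERE document_id = :doc_id"),
        {"doc_id": 1},
    ).scalar()
    assert remaining == 0  # removed by the database, not by application code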

View File

@@ -1,8 +1,8 @@
-"""Create models
+"""update

-Revision ID: 6b17fe6b632e
+Revision ID: cb320e7880fc
 Revises:
-Create Date: 2024-03-16 19:20:30.169049
+Create Date: 2024-03-17 10:09:40.034197

 """
 from typing import Sequence, Union
@@ -12,7 +12,7 @@ import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

 # revision identifiers, used by Alembic.
-revision: str = '6b17fe6b632e'
+revision: str = 'cb320e7880fc'
 down_revision: Union[str, None] = None
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
{"graph_dict": {}}

View File

@@ -0,0 +1 @@
{"embedding_dict": {}, "text_id_to_ref_doc_id": {}, "metadata_dict": {}}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
tmp lock file

View File

@@ -0,0 +1 @@
{"collections": {"make_this_parameterizable_per_api_call": {"vectors": {"size": 768, "distance": "Cosine", "hnsw_config": null, "quantization_config": null, "on_disk": null}, "shard_number": null, "sharding_method": null, "replication_factor": null, "write_consistency_factor": null, "on_disk_payload": null, "hnsw_config": null, "wal_config": null, "optimizers_config": null, "init_from": null, "quantization_config": null, "sparse_vectors": null}}, "aliases": {}}

View File

@@ -18,7 +18,7 @@ from private_gpt.server.chunks.chunks_router import chunks_router
 from private_gpt.server.ingest.ingest_router import ingest_router
 from private_gpt.components.ocr_components.table_ocr_api import pdf_router
 from private_gpt.server.completions.completions_router import completions_router
-from private_gpt.server.embeddings.embeddings_router import embxeddings_router
+from private_gpt.server.embeddings.embeddings_router import embeddings_router

 logger = logging.getLogger(__name__)

View File

@@ -200,12 +200,14 @@ async def create_documents(
             status_code=409,
             detail="File already exists. Choose a different file.",
         )
+    print(f"{file_name} uploaded by {current_user.id} action {MakerCheckerActionType.INSERT.value} and status {MakerCheckerStatus.PENDING.value}")
     docs_in = schemas.DocumentMakerCreate(
         filename=file_name,
         uploaded_by=current_user.id,
         action_type=MakerCheckerActionType.INSERT,
         status=MakerCheckerStatus.PENDING
     )
+    print("DOCUMENT CREATE: ", docs_in)
     document = crud.documents.create(db=db, obj_in=docs_in)
     department_ids = [int(number) for number in department_ids.split(",")]
     for department_id in department_ids:

View File

@@ -41,12 +41,12 @@ def register_user(
         company_id=company.id,
         department_id=department.id,
     )
-    try:
-        send_registration_email(fullname, email, password)
-    except Exception as e:
-        logging.info(f"Failed to send registration email: {str(e)}")
-        raise HTTPException(
-            status_code=500, detail=f"Failed to send registration email.")
+    # try:
+    #     send_registration_email(fullname, email, password)
+    # except Exception as e:
+    #     logging.info(f"Failed to send registration email: {str(e)}")
+    #     raise HTTPException(
+    #         status_code=500, detail=f"Failed to send registration email.")
     return crud.user.create(db, obj_in=user_in)
@@ -82,7 +82,7 @@ def create_token_payload(user: models.User, user_role: models.UserRole) -> dict:
         "id": str(user.id),
         "email": str(user.email),
         "role": user_role.role.name,
-        "username": str(user.fullname),
+        "username": str(user.username),
         "company_id": user_role.company.id if user_role.company else None,
         "department_id": user.department_id
     }
@@ -173,7 +173,7 @@ def login_access_token(
     token_payload = {
         "id": str(user.id),
         "email": str(user.email),
-        "username": str(user.fullname),
+        "username": str(user.username),
         "role": role,
         "company_id": company_id,
         "department_id": str(user.department_id),
@@ -229,7 +229,7 @@ def register(
     db: Session = Depends(deps.get_db),
     email: str = Body(...),
     fullname: str = Body(...),
-    password: str = Body(...),
+    # password: str = Body(...),
     department_id: int = Body(None, title="Department ID",
                               description="Department name for the user (if applicable)"),
     role_name: str = Body(None, title="Role Name",
@@ -249,14 +249,14 @@ def register(
             model='User',
             action='creation',
             details={"status": '409', 'detail': "The user with this email already exists!", },
-            user_id=current_user.id
+            # user_id=current_user.id
         )
         raise HTTPException(
             status_code=409,
             detail="The user with this email already exists!",
         )
-    # random_password = security.generate_random_password()
-    random_password = password
+    random_password = security.generate_random_password()
+    # random_password = password
     try:
         company_id = current_user.company_id
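
With the password body parameter commented out above, register() now always derives the password from security.generate_random_password(). That helper's implementation is not part of this diff; the following is a purely hypothetical sketch of such a generator, built on Python's secrets module.

import secrets
import string

def generate_random_password(length: int = 12) -> str:
    # Hypothetical stand-in for security.generate_random_password();
    # draws from a CSPRNG over letters, digits and punctuation.
    alphabet = string.ascii_letters + string.digits + string.punctuation
    return "".join(secrets.choice(alphabet) for _ in range(length))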

View File

@@ -14,7 +14,7 @@ from private_gpt.constants import UNCHECKED_DIR
 from private_gpt.users.constants.role import Role
 from private_gpt.users import crud, models, schemas
 from private_gpt.server.ingest.ingest_router import create_documents, ingest
-from private_gpt.users.models.makerchecker import MakerCheckerActionType, MakerCheckerStatus
+from private_gpt.users.models.document import MakerCheckerActionType, MakerCheckerStatus
 logger = logging.getLogger(__name__)
 router = APIRouter(prefix='/documents', tags=['Documents'])
@@ -284,7 +284,9 @@ async def verify_documents(
             detail="Document not found!",
         )
     unchecked_path = Path(f"{UNCHECKED_DIR}/{document.filename}")
-    if checker_in.status == MakerCheckerStatus.APPROVED:
+    print(checker_in.status)
+    print(MakerCheckerStatus.APPROVED.value)
+    if checker_in.status == MakerCheckerStatus.APPROVED.value:
         checker = schemas.DocumentCheckerUpdate(
             status=MakerCheckerStatus.APPROVED,
             is_enabled=checker_in.is_enabled,
@@ -294,13 +296,13 @@
         crud.documents.update(db=db, db_obj= document, obj_in=checker)
         if document.doc_type_id == 2:
-            return ingest(request, unchecked_path)
+            return await ingest(request, unchecked_path)
         elif document.doc_type_id == 3:
-            return ingest(request, unchecked_path)
+            return await ingest(request, unchecked_path)
         else:
-            return ingest(request, unchecked_path)
+            return await ingest(request, unchecked_path)
-    elif checker_in.status == MakerCheckerStatus.REJECTED:
+    elif checker_in.status == MakerCheckerStatus.REJECTED.value:
         checker = schemas.DocumentCheckerUpdate(
             status=MakerCheckerStatus.REJECTED,
             is_enabled=checker_in.is_enabled,
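
The .value comparisons above matter because checker_in.status presumably arrives as a plain string from the request payload, while MakerCheckerStatus members are enum objects, so comparing against the member itself is always False. A self-contained sketch using the enum values from this commit (the incoming value is illustrative):

from enum import Enum

class MakerCheckerStatus(Enum):
    PENDING = 'PENDING'
    APPROVED = 'APPROVED'
    REJECTED = 'REJECTED'

incoming = "APPROVED"                                 # as it might arrive in the request body
print(incoming == MakerCheckerStatus.APPROVED)        # False: str compared to an Enum member
print(incoming == MakerCheckerStatus.APPROVED.value)  # True: str compared to str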

View File

@@ -18,7 +18,7 @@ class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]):
         db_obj = User(
             email=obj_in.email,
             hashed_password=get_password_hash(obj_in.password),
-            fullname=obj_in.fullname,
+            username=obj_in.username,
             company_id=obj_in.company_id,
             department_id=obj_in.department_id,
         )
@@ -92,7 +92,7 @@ class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]):
         )

     def get_by_name(self, db: Session, *, name: str) -> Optional[User]:
-        return db.query(self.model).filter(User.fullname == name).first()
+        return db.query(self.model).filter(User.username == name).first()

     def get_by_department_id(
         self, db: Session, *, department_id: int, skip: int = 0, limit: int = 100

View File

@@ -10,15 +10,15 @@ from sqlalchemy import Enum
 from enum import Enum as PythonEnum

 class MakerCheckerStatus(PythonEnum):
-    PENDING = 'pending'
-    APPROVED = 'approved'
-    REJECTED = 'rejected'
+    PENDING = 'PENDING'
+    APPROVED = 'APPROVED'
+    REJECTED = 'REJECTED'

 class MakerCheckerActionType(PythonEnum):
-    INSERT = 'insert'
-    UPDATE = 'update'
-    DELETE = 'delete'
+    INSERT = 'INSERT'
+    UPDATE = 'UPDATE'
+    DELETE = 'DELETE'

 class DocumentType(Base):
     """Models a document table"""
@@ -38,7 +38,7 @@ class Document(Base):
     uploaded_by = Column(
         Integer,
         ForeignKey("users.id"),
-        nullable=False,
+        nullable=False
     )
     uploaded_at = Column(
         DateTime,
@@ -46,7 +46,9 @@ class Document(Base):
         nullable=False,
     )
     uploaded_by_user = relationship(
-        "User", back_populates="uploaded_documents")
+        "User", back_populates="uploaded_documents",
+        foreign_keys="[Document.uploaded_by]")

     is_enabled = Column(Boolean, default=True)
     verified = Column(Boolean, default=False)
@@ -69,23 +71,22 @@ class Document(Base):
         back_populates="documents"
     )

-# Event listeners for updating total_documents in Department
-@event.listens_for(Document, 'after_insert')
-@event.listens_for(Document, 'after_delete')
-def update_total_documents(mapper, connection, target):
-    total_documents = connection.execute(
-        select([func.count()]).select_from(document_department_association).where(
-            document_department_association.c.document_id == target.id)
-    ).scalar()
-    department_ids = [assoc.department_id for assoc in connection.execute(
-        select([document_department_association.c.department_id]).where(
-            document_department_association.c.document_id == target.id)
-    )]
-    # Update total_documents for each associated department
-    for department_id in department_ids:
-        connection.execute(
-            update(Department).values(total_documents=total_documents).where(
-                Department.id == department_id)
-        )
+# @event.listens_for(Document, 'after_insert')
+# @event.listens_for(Document, 'after_delete')
+# def update_total_documents(mapper, connection, target):
+#     total_documents = connection.execute(
+#         func.count().select().select_from(document_department_association).where(
+#             document_department_association.c.document_id == target.id)
+#     ).scalar()
+#     department_ids = [assoc.department_id for assoc in connection.execute(
+#         select([document_department_association.c.department_id]).where(
+#             document_department_association.c.document_id == target.id)
+#     )]
+#     # Update total_documents for each associated department
+#     for department_id in department_ids:
+#         connection.execute(
+#             update(Department).values(total_documents=total_documents).where(
+#                 Department.id == department_id)
+#         )

View File

@@ -4,7 +4,7 @@ from sqlalchemy import Column, Integer, Table, ForeignKey
 document_department_association = Table(
     "document_department_association",
     Base.metadata,
-    Column("department_id", Integer, ForeignKey("departments.id")),
-    Column("document_id", Integer, ForeignKey("document.id")),
+    Column("department_id", Integer, ForeignKey("departments.id", ondelete="CASCADE", onupdate="CASCADE")),
+    Column("document_id", Integer, ForeignKey("document.id", ondelete="CASCADE", onupdate="CASCADE")),
     extend_existing=True
 )
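
The ondelete/onupdate arguments here mirror the migration at the top of this commit, but they only shape the DDL SQLAlchemy emits when tables are created from scratch; an existing database still needs that migration to pick up the cascades. A self-contained sketch (in-memory SQLite, purely illustrative) that prints the generated DDL:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table, create_engine

metadata = MetaData()
document = Table("document", metadata, Column("id", Integer, primary_key=True))
association = Table(
    "document_department_association",
    metadata,
    Column("document_id", Integer,
           ForeignKey("document.id", ondelete="CASCADE", onupdate="CASCADE")),
)

# echo=True prints the CREATE TABLE statements, including
# "... REFERENCES document (id) ON DELETE CASCADE ON UPDATE CASCADE".
engine = create_engine("sqlite://", echo=True)
metadata.create_all(engine)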

View File

@@ -41,7 +41,8 @@ class User(Base):
     company = relationship("Company", back_populates="users")
     uploaded_documents = relationship(
-        "Document", back_populates="uploaded_by_user")
+        "Document", back_populates="uploaded_by_user",
+        foreign_keys="[Document.uploaded_by]")
     user_role = relationship(
         "UserRole", back_populates="user", uselist=False, cascade="all, delete-orphan")
@@ -61,18 +62,18 @@
 # Event listeners
-@event.listens_for(User, 'after_insert')
-@event.listens_for(User, 'after_delete')
-def update_total_users(mapper, connection, target):
-    department_id = target.department_id
-    total_users = connection.execute(
-        select([func.count()]).select_from(User).where(
-            User.department_id == department_id)
-    ).scalar()
-    connection.execute(
-        update(Department).values(total_users=total_users).where(
-            Department.id == department_id)
-    )
+# @event.listens_for(User, 'after_insert')
+# @event.listens_for(User, 'after_delete')
+# def update_total_users(mapper, connection, target):
+#     department_id = target.department_id
+#     total_users = connection.execute(
+#         select([func.count()]).select_from(User).where(
+#             User.department_id == department_id)
+#     ).scalar()
+#     connection.execute(
+#         update(Department).values(total_users=total_users).where(
+#             Department.id == department_id)
+#     )

 @event.listens_for(User, 'before_insert')

View File

@@ -35,7 +35,7 @@ class Document(BaseModel):
     id: int
     is_enabled: bool
     filename: str
-    uploaded_by: str
+    uploaded_by: int
     uploaded_at: datetime
     departments: List[DepartmentList] = []