update with new cascade

This commit is contained in:
Saurab-Shrestha9639*969**9858//852 2024-03-17 11:16:34 +05:45
parent de39e594f8
commit 2bcc7f589f
19 changed files with 120 additions and 70 deletions

4
.env
View File

@ -4,8 +4,8 @@ ENVIRONMENT=dev
DB_HOST=localhost
DB_USER=postgres
DB_PORT=5432
DB_PASSWORD=quick
DB_NAME=QuickGpt
DB_PASSWORD=admin
DB_NAME=GPT
SUPER_ADMIN_EMAIL=superadmin@email.com
SUPER_ADMIN_PASSWORD=supersecretpassword

View File

@ -0,0 +1,38 @@
"""udpate cascade
Revision ID: 39c817e4fc4a
Revises: cb320e7880fc
Create Date: 2024-03-17 11:08:22.426368
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '39c817e4fc4a'
down_revision: Union[str, None] = 'cb320e7880fc'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Recreate the association-table foreign keys with ON UPDATE / ON DELETE CASCADE.

    Drops the original FKs on ``document_department_association`` and
    re-creates them so that deleting/renumbering a document or department
    cascades to the association rows.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('document_department_association_document_id_fkey',
                       'document_department_association', type_='foreignkey')
    op.drop_constraint('document_department_association_department_id_fkey',
                       'document_department_association', type_='foreignkey')
    # Name the replacement constraints explicitly instead of passing None.
    # The names chosen match PostgreSQL's default <table>_<column>_fkey
    # convention (what an unnamed FK would receive anyway), so behavior is
    # unchanged while downgrade() can now drop them deterministically.
    op.create_foreign_key(
        'document_department_association_document_id_fkey',
        'document_department_association', 'document',
        ['document_id'], ['id'],
        onupdate='CASCADE', ondelete='CASCADE')
    op.create_foreign_key(
        'document_department_association_department_id_fkey',
        'document_department_association', 'departments',
        ['department_id'], ['id'],
        onupdate='CASCADE', ondelete='CASCADE')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Restore the original foreign keys (no CASCADE behavior).

    Bug fix: the auto-generated ``op.drop_constraint(None, ...)`` calls
    raise ``ValueError`` in Alembic — a constraint name is required — so
    this downgrade could never run. We drop the CASCADE constraints by the
    default names PostgreSQL assigns to the unnamed FKs created in
    upgrade() (<table>_<column>_fkey), then re-create the originals.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('document_department_association_document_id_fkey',
                       'document_department_association', type_='foreignkey')
    op.drop_constraint('document_department_association_department_id_fkey',
                       'document_department_association', type_='foreignkey')
    op.create_foreign_key('document_department_association_department_id_fkey',
                          'document_department_association', 'departments',
                          ['department_id'], ['id'])
    op.create_foreign_key('document_department_association_document_id_fkey',
                          'document_department_association', 'document',
                          ['document_id'], ['id'])
    # ### end Alembic commands ###

View File

@ -1,8 +1,8 @@
"""Create models
"""update
Revision ID: 6b17fe6b632e
Revision ID: cb320e7880fc
Revises:
Create Date: 2024-03-16 19:20:30.169049
Create Date: 2024-03-17 10:09:40.034197
"""
from typing import Sequence, Union
@ -12,7 +12,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '6b17fe6b632e'
revision: str = 'cb320e7880fc'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
{"graph_dict": {}}

View File

@ -0,0 +1 @@
{"embedding_dict": {}, "text_id_to_ref_doc_id": {}, "metadata_dict": {}}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
tmp lock file

View File

@ -0,0 +1 @@
{"collections": {"make_this_parameterizable_per_api_call": {"vectors": {"size": 768, "distance": "Cosine", "hnsw_config": null, "quantization_config": null, "on_disk": null}, "shard_number": null, "sharding_method": null, "replication_factor": null, "write_consistency_factor": null, "on_disk_payload": null, "hnsw_config": null, "wal_config": null, "optimizers_config": null, "init_from": null, "quantization_config": null, "sparse_vectors": null}}, "aliases": {}}

View File

@ -18,7 +18,7 @@ from private_gpt.server.chunks.chunks_router import chunks_router
from private_gpt.server.ingest.ingest_router import ingest_router
from private_gpt.components.ocr_components.table_ocr_api import pdf_router
from private_gpt.server.completions.completions_router import completions_router
from private_gpt.server.embeddings.embeddings_router import embxeddings_router
from private_gpt.server.embeddings.embeddings_router import embeddings_router
logger = logging.getLogger(__name__)

View File

@ -200,12 +200,14 @@ async def create_documents(
status_code=409,
detail="File already exists. Choose a different file.",
)
print(f"{file_name} uploaded by {current_user.id} action {MakerCheckerActionType.INSERT.value} and status {MakerCheckerStatus.PENDING.value}")
docs_in = schemas.DocumentMakerCreate(
filename=file_name,
uploaded_by=current_user.id,
action_type=MakerCheckerActionType.INSERT,
status=MakerCheckerStatus.PENDING
)
print("DOCUMENT CREATE: ", docs_in)
document = crud.documents.create(db=db, obj_in=docs_in)
department_ids = [int(number) for number in department_ids.split(",")]
for department_id in department_ids:

View File

@ -41,12 +41,12 @@ def register_user(
company_id=company.id,
department_id=department.id,
)
try:
send_registration_email(fullname, email, password)
except Exception as e:
logging.info(f"Failed to send registration email: {str(e)}")
raise HTTPException(
status_code=500, detail=f"Failed to send registration email.")
# try:
# send_registration_email(fullname, email, password)
# except Exception as e:
# logging.info(f"Failed to send registration email: {str(e)}")
# raise HTTPException(
# status_code=500, detail=f"Failed to send registration email.")
return crud.user.create(db, obj_in=user_in)
@ -82,7 +82,7 @@ def create_token_payload(user: models.User, user_role: models.UserRole) -> dict:
"id": str(user.id),
"email": str(user.email),
"role": user_role.role.name,
"username": str(user.fullname),
"username": str(user.username),
"company_id": user_role.company.id if user_role.company else None,
"department_id": user.department_id
}
@ -173,7 +173,7 @@ def login_access_token(
token_payload = {
"id": str(user.id),
"email": str(user.email),
"username": str(user.fullname),
"username": str(user.username),
"role": role,
"company_id": company_id,
"department_id": str(user.department_id),
@ -229,7 +229,7 @@ def register(
db: Session = Depends(deps.get_db),
email: str = Body(...),
fullname: str = Body(...),
password: str = Body(...),
# password: str = Body(...),
department_id: int = Body(None, title="Department ID",
description="Department name for the user (if applicable)"),
role_name: str = Body(None, title="Role Name",
@ -249,14 +249,14 @@ def register(
model='User',
action='creation',
details={"status": '409', 'detail': "The user with this email already exists!", },
user_id=current_user.id
# user_id=current_user.id
)
raise HTTPException(
status_code=409,
detail="The user with this email already exists!",
)
# random_password = security.generate_random_password()
random_password = password
random_password = security.generate_random_password()
# random_password = password
try:
company_id = current_user.company_id

View File

@ -14,7 +14,7 @@ from private_gpt.constants import UNCHECKED_DIR
from private_gpt.users.constants.role import Role
from private_gpt.users import crud, models, schemas
from private_gpt.server.ingest.ingest_router import create_documents, ingest
from private_gpt.users.models.makerchecker import MakerCheckerActionType, MakerCheckerStatus
from private_gpt.users.models.document import MakerCheckerActionType, MakerCheckerStatus
logger = logging.getLogger(__name__)
router = APIRouter(prefix='/documents', tags=['Documents'])
@ -284,7 +284,9 @@ async def verify_documents(
detail="Document not found!",
)
unchecked_path = Path(f"{UNCHECKED_DIR}/{document.filename}")
if checker_in.status == MakerCheckerStatus.APPROVED:
print(checker_in.status)
print(MakerCheckerStatus.APPROVED.value)
if checker_in.status == MakerCheckerStatus.APPROVED.value:
checker = schemas.DocumentCheckerUpdate(
status=MakerCheckerStatus.APPROVED,
is_enabled=checker_in.is_enabled,
@ -294,13 +296,13 @@ async def verify_documents(
crud.documents.update(db=db, db_obj= document, obj_in=checker)
if document.doc_type_id == 2:
return ingest(request, unchecked_path)
return await ingest(request, unchecked_path)
elif document.doc_type_id == 3:
return ingest(request, unchecked_path)
return await ingest(request, unchecked_path)
else:
return ingest(request, unchecked_path)
return await ingest(request, unchecked_path)
elif checker_in.status == MakerCheckerStatus.REJECTED:
elif checker_in.status == MakerCheckerStatus.REJECTED.value:
checker = schemas.DocumentCheckerUpdate(
status=MakerCheckerStatus.REJECTED,
is_enabled=checker_in.is_enabled,

View File

@ -18,7 +18,7 @@ class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]):
db_obj = User(
email=obj_in.email,
hashed_password=get_password_hash(obj_in.password),
fullname=obj_in.fullname,
username=obj_in.username,
company_id=obj_in.company_id,
department_id=obj_in.department_id,
)
@ -92,7 +92,7 @@ class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]):
)
def get_by_name(self, db: Session, *, name: str) -> Optional[User]:
return db.query(self.model).filter(User.fullname == name).first()
return db.query(self.model).filter(User.username == name).first()
def get_by_department_id(
self, db: Session, *, department_id: int, skip: int = 0, limit: int = 100

View File

@ -10,15 +10,15 @@ from sqlalchemy import Enum
from enum import Enum as PythonEnum
class MakerCheckerStatus(PythonEnum):
PENDING = 'pending'
APPROVED = 'approved'
REJECTED = 'rejected'
PENDING = 'PENDING'
APPROVED = 'APPROVED'
REJECTED = 'REJECTED'
class MakerCheckerActionType(PythonEnum):
INSERT = 'insert'
UPDATE = 'update'
DELETE = 'delete'
INSERT = 'INSERT'
UPDATE = 'UPDATE'
DELETE = 'DELETE'
class DocumentType(Base):
"""Models a document table"""
@ -38,7 +38,7 @@ class Document(Base):
uploaded_by = Column(
Integer,
ForeignKey("users.id"),
nullable=False,
nullable=False
)
uploaded_at = Column(
DateTime,
@ -46,7 +46,9 @@ class Document(Base):
nullable=False,
)
uploaded_by_user = relationship(
"User", back_populates="uploaded_documents")
"User", back_populates="uploaded_documents",
foreign_keys="[Document.uploaded_by]")
is_enabled = Column(Boolean, default=True)
verified = Column(Boolean, default=False)
@ -69,23 +71,22 @@ class Document(Base):
back_populates="documents"
)
# Event listeners for updating total_documents in Department
@event.listens_for(Document, 'after_insert')
@event.listens_for(Document, 'after_delete')
def update_total_documents(mapper, connection, target):
total_documents = connection.execute(
select([func.count()]).select_from(document_department_association).where(
document_department_association.c.document_id == target.id)
).scalar()
# @event.listens_for(Document, 'after_insert')
# @event.listens_for(Document, 'after_delete')
# def update_total_documents(mapper, connection, target):
# total_documents = connection.execute(
# func.count().select().select_from(document_department_association).where(
# document_department_association.c.document_id == target.id)
# ).scalar()
department_ids = [assoc.department_id for assoc in connection.execute(
select([document_department_association.c.department_id]).where(
document_department_association.c.document_id == target.id)
)]
# department_ids = [assoc.department_id for assoc in connection.execute(
# select([document_department_association.c.department_id]).where(
# document_department_association.c.document_id == target.id)
# )]
# Update total_documents for each associated department
for department_id in department_ids:
connection.execute(
update(Department).values(total_documents=total_documents).where(
Department.id == department_id)
)
# # Update total_documents for each associated department
# for department_id in department_ids:
# connection.execute(
# update(Department).values(total_documents=total_documents).where(
# Department.id == department_id)
# )

View File

@ -4,7 +4,7 @@ from sqlalchemy import Column, Integer, Table, ForeignKey
document_department_association = Table(
"document_department_association",
Base.metadata,
Column("department_id", Integer, ForeignKey("departments.id")),
Column("document_id", Integer, ForeignKey("document.id")),
Column("department_id", Integer, ForeignKey("departments.id", ondelete="CASCADE", onupdate="CASCADE")),
Column("document_id", Integer, ForeignKey("document.id", ondelete="CASCADE", onupdate="CASCADE")),
extend_existing=True
)
)

View File

@ -41,7 +41,8 @@ class User(Base):
company = relationship("Company", back_populates="users")
uploaded_documents = relationship(
"Document", back_populates="uploaded_by_user")
"Document", back_populates="uploaded_by_user",
foreign_keys="[Document.uploaded_by]")
user_role = relationship(
"UserRole", back_populates="user", uselist=False, cascade="all, delete-orphan")
@ -61,18 +62,18 @@ class User(Base):
# Event listeners
@event.listens_for(User, 'after_insert')
@event.listens_for(User, 'after_delete')
def update_total_users(mapper, connection, target):
department_id = target.department_id
total_users = connection.execute(
select([func.count()]).select_from(User).where(
User.department_id == department_id)
).scalar()
connection.execute(
update(Department).values(total_users=total_users).where(
Department.id == department_id)
)
# @event.listens_for(User, 'after_insert')
# @event.listens_for(User, 'after_delete')
# def update_total_users(mapper, connection, target):
# department_id = target.department_id
# total_users = connection.execute(
# select([func.count()]).select_from(User).where(
# User.department_id == department_id)
# ).scalar()
# connection.execute(
# update(Department).values(total_users=total_users).where(
# Department.id == department_id)
# )
@event.listens_for(User, 'before_insert')

View File

@ -35,7 +35,7 @@ class Document(BaseModel):
id: int
is_enabled: bool
filename: str
uploaded_by: str
uploaded_by: int
uploaded_at: datetime
departments: List[DepartmentList] = []