mirror of https://github.com/imartinez/privateGPT.git
synced 2025-07-01 09:42:03 +00:00

Update maker checker
commit de39e594f8 (parent 6d7babb349)

.env
@@ -4,7 +4,7 @@ ENVIRONMENT=dev
 DB_HOST=localhost
 DB_USER=postgres
 DB_PORT=5432
-DB_PASSWORD=admin
+DB_PASSWORD=quick
 DB_NAME=QuickGpt

 SUPER_ADMIN_EMAIL=superadmin@email.com
(deleted file: Alembic revision 59b6ae907209, "Added checker")
@@ -1,32 +0,0 @@
-"""Added checker
-
-Revision ID: 59b6ae907209
-Revises: ee8ae7222697
-Create Date: 2024-03-07 16:48:21.115238
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = '59b6ae907209'
-down_revision: Union[str, None] = 'ee8ae7222697'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    # op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id', 'company_id'])
-    op.add_column('users', sa.Column('checker', sa.Boolean(), nullable=True))
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('users', 'checker')
-    # op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
-    # ### end Alembic commands ###
alembic/versions/6b17fe6b632e_create_models.py (new file, 157 lines)
@@ -0,0 +1,157 @@
+"""Create models
+
+Revision ID: 6b17fe6b632e
+Revises:
+Create Date: 2024-03-16 19:20:30.169049
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = '6b17fe6b632e'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('companies',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('name', sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_companies_id'), 'companies', ['id'], unique=False)
+    op.create_index(op.f('ix_companies_name'), 'companies', ['name'], unique=True)
+    op.create_table('document_type',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('type', sa.String(length=225), nullable=False),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('type')
+    )
+    op.create_index(op.f('ix_document_type_id'), 'document_type', ['id'], unique=False)
+    op.create_table('roles',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('name', sa.String(length=100), nullable=True),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_roles_id'), 'roles', ['id'], unique=False)
+    op.create_index(op.f('ix_roles_name'), 'roles', ['name'], unique=False)
+    op.create_table('departments',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('name', sa.String(), nullable=True),
+        sa.Column('company_id', sa.Integer(), nullable=True),
+        sa.Column('total_users', sa.Integer(), nullable=True),
+        sa.Column('total_documents', sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(['company_id'], ['companies.id'], ),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_departments_id'), 'departments', ['id'], unique=False)
+    op.create_index(op.f('ix_departments_name'), 'departments', ['name'], unique=True)
+    op.create_table('subscriptions',
+        sa.Column('sub_id', sa.Integer(), nullable=False),
+        sa.Column('company_id', sa.Integer(), nullable=True),
+        sa.Column('start_date', sa.DateTime(), nullable=True),
+        sa.Column('end_date', sa.DateTime(), nullable=True),
+        sa.ForeignKeyConstraint(['company_id'], ['companies.id'], ),
+        sa.PrimaryKeyConstraint('sub_id')
+    )
+    op.create_index(op.f('ix_subscriptions_sub_id'), 'subscriptions', ['sub_id'], unique=False)
+    op.create_table('users',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('email', sa.String(length=225), nullable=False),
+        sa.Column('hashed_password', sa.String(), nullable=False),
+        sa.Column('username', sa.String(length=225), nullable=False),
+        sa.Column('is_active', sa.Boolean(), nullable=True),
+        sa.Column('last_login', sa.DateTime(), nullable=True),
+        sa.Column('created_at', sa.DateTime(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), nullable=True),
+        sa.Column('password_created', sa.DateTime(), nullable=True),
+        sa.Column('checker', sa.Boolean(), nullable=True),
+        sa.Column('company_id', sa.Integer(), nullable=True),
+        sa.Column('department_id', sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(['company_id'], ['companies.id'], ),
+        sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('email'),
+        sa.UniqueConstraint('username'),
+        sa.UniqueConstraint('username', name='unique_username_no_spacing')
+    )
+    op.create_table('audit',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('timestamp', sa.DateTime(), nullable=False),
+        sa.Column('user_id', sa.Integer(), nullable=True),
+        sa.Column('model', sa.String(length=100), nullable=False),
+        sa.Column('action', sa.String(length=50), nullable=False),
+        sa.Column('details', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+        sa.Column('ip_address', sa.String(length=45), nullable=True),
+        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='SET NULL'),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_audit_id'), 'audit', ['id'], unique=False)
+    op.create_table('document',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('filename', sa.String(length=225), nullable=False),
+        sa.Column('uploaded_by', sa.Integer(), nullable=False),
+        sa.Column('uploaded_at', sa.DateTime(), nullable=False),
+        sa.Column('is_enabled', sa.Boolean(), nullable=True),
+        sa.Column('verified', sa.Boolean(), nullable=True),
+        sa.Column('doc_type_id', sa.Integer(), nullable=True),
+        sa.Column('action_type', sa.Enum('INSERT', 'UPDATE', 'DELETE', name='makercheckeractiontype'), nullable=False),
+        sa.Column('status', sa.Enum('PENDING', 'APPROVED', 'REJECTED', name='makercheckerstatus'), nullable=False),
+        sa.Column('verified_at', sa.DateTime(), nullable=True),
+        sa.Column('verified_by', sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(['doc_type_id'], ['document_type.id'], ),
+        sa.ForeignKeyConstraint(['uploaded_by'], ['users.id'], ),
+        sa.ForeignKeyConstraint(['verified_by'], ['users.id'], ),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('filename')
+    )
+    op.create_index(op.f('ix_document_id'), 'document', ['id'], unique=False)
+    op.create_table('user_roles',
+        sa.Column('user_id', sa.Integer(), nullable=False),
+        sa.Column('role_id', sa.Integer(), nullable=False),
+        sa.Column('company_id', sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(['company_id'], ['companies.id'], ),
+        sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
+        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
+        sa.PrimaryKeyConstraint('user_id', 'role_id', 'company_id'),
+        sa.UniqueConstraint('user_id', 'role_id', 'company_id', name='unique_user_role')
+    )
+    op.create_table('document_department_association',
+        sa.Column('department_id', sa.Integer(), nullable=True),
+        sa.Column('document_id', sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ),
+        sa.ForeignKeyConstraint(['document_id'], ['document.id'], )
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('document_department_association')
+    op.drop_table('user_roles')
+    op.drop_index(op.f('ix_document_id'), table_name='document')
+    op.drop_table('document')
+    op.drop_index(op.f('ix_audit_id'), table_name='audit')
+    op.drop_table('audit')
+    op.drop_table('users')
+    op.drop_index(op.f('ix_subscriptions_sub_id'), table_name='subscriptions')
+    op.drop_table('subscriptions')
+    op.drop_index(op.f('ix_departments_name'), table_name='departments')
+    op.drop_index(op.f('ix_departments_id'), table_name='departments')
+    op.drop_table('departments')
+    op.drop_index(op.f('ix_roles_name'), table_name='roles')
+    op.drop_index(op.f('ix_roles_id'), table_name='roles')
+    op.drop_table('roles')
+    op.drop_index(op.f('ix_document_type_id'), table_name='document_type')
+    op.drop_table('document_type')
+    op.drop_index(op.f('ix_companies_name'), table_name='companies')
+    op.drop_index(op.f('ix_companies_id'), table_name='companies')
+    op.drop_table('companies')
+    # ### end Alembic commands ###
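This single consolidated revision replaces the chain of incremental migrations that this commit deletes (revisions 59b6ae907209, 9ae2f4e97436, caa694775d4e, f2978211af18 and ee8ae7222697, shown above and below). Rebuilding a database against the new head is the usual Alembic upgrade; a minimal sketch using Alembic's Python API, assuming the project keeps a standard alembic.ini at the repository root:

# Sketch only: apply the consolidated migration programmatically.
# Assumes an alembic.ini at the repository root that points at the target database.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")       # hypothetical path; adjust to the project layout
command.upgrade(cfg, "head")      # same effect as running `alembic upgrade head`
# command.downgrade(cfg, "base")  # would undo everything created in upgrade()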
(deleted file: Alembic revision 9ae2f4e97436, "Create department and audit models")
@@ -1,67 +0,0 @@
-"""Create department and audit models
-
-Revision ID: 9ae2f4e97436
-Revises:
-Create Date: 2024-02-28 14:53:19.144973
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision: str = '9ae2f4e97436'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('departments',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(), nullable=True),
-        sa.Column('company_id', sa.Integer(), nullable=True),
-        sa.Column('total_users', sa.Integer(), nullable=True),
-        sa.Column('total_documents', sa.Integer(), nullable=True),
-        sa.ForeignKeyConstraint(['company_id'], ['companies.id'], ),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index(op.f('ix_departments_id'), 'departments', ['id'], unique=False)
-    op.create_index(op.f('ix_departments_name'), 'departments', ['name'], unique=True)
-    op.create_table('audit',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('timestamp', sa.DateTime(), nullable=False),
-        sa.Column('user_id', sa.Integer(), nullable=True),
-        sa.Column('model', sa.String(), nullable=False),
-        sa.Column('action', sa.String(), nullable=False),
-        sa.Column('details', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='SET NULL'),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index(op.f('ix_audit_id'), 'audit', ['id'], unique=False)
-    op.add_column('document', sa.Column('department_id', sa.Integer(), nullable=True))
-    op.create_foreign_key(None, 'document', 'departments', ['department_id'], ['id'])
-    # op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id', 'company_id'])
-    op.add_column('users', sa.Column('password_created', sa.DateTime(), nullable=True))
-    op.add_column('users', sa.Column('department_id', sa.Integer(), nullable=True))
-    op.create_foreign_key(None, 'users', 'departments', ['department_id'], ['id'])
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_constraint(None, 'users', type_='foreignkey')
-    op.drop_column('users', 'department_id')
-    op.drop_column('users', 'password_created')
-    # op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
-    op.drop_constraint(None, 'document', type_='foreignkey')
-    op.drop_column('document', 'department_id')
-    op.drop_index(op.f('ix_audit_id'), table_name='audit')
-    op.drop_table('audit')
-    op.drop_index(op.f('ix_departments_name'), table_name='departments')
-    op.drop_index(op.f('ix_departments_id'), table_name='departments')
-    op.drop_table('departments')
-    # ### end Alembic commands ###
(deleted file: Alembic revision caa694775d4e, "Added ip_address column in audit log")
@@ -1,44 +0,0 @@
-"""Added ip_address column in audit log
-
-Revision ID: caa694775d4e
-Revises: 9ae2f4e97436
-Create Date: 2024-03-05 10:37:38.955333
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = 'caa694775d4e'
-down_revision: Union[str, None] = '9ae2f4e97436'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('audit', sa.Column('ip_address', sa.String(), nullable=True))
-    op.alter_column('document', 'department_id',
-               existing_type=sa.INTEGER(),
-               nullable=False)
-    # op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id', 'company_id'])
-    op.alter_column('users', 'department_id',
-               existing_type=sa.INTEGER(),
-               nullable=False)
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('users', 'department_id',
-               existing_type=sa.INTEGER(),
-               nullable=True)
-    # op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
-    op.alter_column('document', 'department_id',
-               existing_type=sa.INTEGER(),
-               nullable=True)
-    op.drop_column('audit', 'ip_address')
-    # ### end Alembic commands ###
(deleted file: Alembic revision ee8ae7222697, "added is_enabled in documents")
@@ -1,32 +0,0 @@
-"""added is_enabled in documents
-
-Revision ID: ee8ae7222697
-Revises: f2978211af18
-Create Date: 2024-03-07 15:34:22.365353
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = 'ee8ae7222697'
-down_revision: Union[str, None] = 'f2978211af18'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('document', sa.Column('is_enabled', sa.Boolean(), nullable=True))
-    # op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id', 'company_id'])
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    # op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
-    op.drop_column('document', 'is_enabled')
-    # ### end Alembic commands ###
(deleted file: Alembic revision f2978211af18, "Documents association with multiple departments")
@@ -1,41 +0,0 @@
-"""Documents association with multiple departments
-
-Revision ID: f2978211af18
-Revises: caa694775d4e
-Create Date: 2024-03-06 16:43:16.492578
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = 'f2978211af18'
-down_revision: Union[str, None] = 'caa694775d4e'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('document_department_association',
-        sa.Column('department_id', sa.Integer(), nullable=True),
-        sa.Column('document_id', sa.Integer(), nullable=True),
-        sa.ForeignKeyConstraint(['department_id'], ['departments.id'], ),
-        sa.ForeignKeyConstraint(['document_id'], ['document.id'], )
-    )
-    op.drop_constraint('document_department_id_fkey', 'document', type_='foreignkey')
-    op.drop_column('document', 'department_id')
-    # op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id', 'company_id'])
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    # op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
-    op.add_column('document', sa.Column('department_id', sa.INTEGER(), autoincrement=False, nullable=False))
-    op.create_foreign_key('document_department_id_fkey', 'document', 'departments', ['department_id'], ['id'])
-    op.drop_table('document_department_association')
-    # ### end Alembic commands ###
local_data/.gitignore (vendored, deleted)
@@ -1,2 +0,0 @@
-*
-!.gitignore
private_gpt/constants.py
@@ -3,6 +3,7 @@ from pathlib import Path

 PROJECT_ROOT_PATH: Path = Path(__file__).parents[1]
 script_dir = os.path.dirname(os.path.abspath(__file__))
-UPLOAD_DIR = os.path.join(script_dir, "static")  # Actual upload path for uploaded file
+UPLOAD_DIR = os.path.join(script_dir, "static/checked")  # Actual upload path for uploaded file
+UNCHECKED_DIR = os.path.join(script_dir, "static/unchecked")  # Actual upload path for uploaded file

 OCR_UPLOAD = os.path.join(script_dir, 'uploads')  # temporary upload path for scanned pdf file
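The upload directory is split into static/checked and static/unchecked to back the maker-checker flow added later in this commit: new uploads land in UNCHECKED_DIR, and only files approved by a checker are ingested from there. The commit itself ingests straight from the unchecked path on approval; the sketch below only illustrates how the two constants relate, and the move-on-approval helper is an assumption rather than code from this change:

# Illustration only; promote_on_approval is hypothetical and not part of this commit.
import shutil
from pathlib import Path

from private_gpt.constants import UPLOAD_DIR, UNCHECKED_DIR

def promote_on_approval(filename: str) -> Path:
    """Move an approved upload from the unchecked area into the checked area."""
    src = Path(UNCHECKED_DIR) / filename
    dst = Path(UPLOAD_DIR) / filename
    shutil.move(str(src), str(dst))  # keep only approved files under static/checked
    return dst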
@@ -18,7 +18,7 @@ from private_gpt.server.chunks.chunks_router import chunks_router
 from private_gpt.server.ingest.ingest_router import ingest_router
 from private_gpt.components.ocr_components.table_ocr_api import pdf_router
 from private_gpt.server.completions.completions_router import completions_router
-from private_gpt.server.embeddings.embeddings_router import embeddings_router
+from private_gpt.server.embeddings.embeddings_router import embxeddings_router

 logger = logging.getLogger(__name__)

@@ -59,13 +59,4 @@ def create_app(root_injector: Injector) -> FastAPI:
         allow_headers=["*"],
     )

-    if settings.ui.enabled:
-        logger.debug("Importing the UI module")
-        try:
-            from private_gpt.ui.ui import PrivateGptUi
-        except ImportError as e:
-            raise ImportError(
-                "UI dependencies not found, install with `poetry install --extras ui`"
-            ) from e
-
     return app
private_gpt/server/ingest/ingest_router.py
@@ -2,10 +2,11 @@ import os
 import logging
 import traceback

+import aiofiles
 from pathlib import Path
 from typing import Literal
-import aiofiles

+from private_gpt.users.models.document import MakerCheckerActionType, MakerCheckerStatus
 from sqlalchemy.orm import Session
 from fastapi import APIRouter, Depends, HTTPException, Request, UploadFile, File, status, Security, Body, Form
 from fastapi.responses import JSONResponse
@@ -18,7 +19,7 @@ from private_gpt.users.constants.role import Role
 from private_gpt.server.ingest.ingest_service import IngestService
 from private_gpt.server.ingest.model import IngestedDoc
 from private_gpt.server.utils.auth import authenticated
-from private_gpt.constants import UPLOAD_DIR, OCR_UPLOAD
+from private_gpt.constants import UPLOAD_DIR

 ingest_router = APIRouter(prefix="/v1", dependencies=[Depends(authenticated)])

@@ -162,8 +163,8 @@ def delete_file(
         model='Document',
         action='delete',
         details={
-            "detail": f"{filename}' deleted successfully.",
-            'user': current_user.fullname,
+            "detail": f"{filename}",
+            'user': current_user.username,
         },
         user_id=current_user.id
     )
@@ -186,13 +187,12 @@ async def create_documents(
     current_user: models.User = None,
     departments: schemas.DocumentDepartmentList = Depends(),
     log_audit: models.Audit = None,
 ):
     """
     Create documents in the `Document` table and update the \n
     `Document Department Association` table with the departments ids for the documents.
     """
     department_ids = departments.departments_ids
-    print("Department IDS: ", department_ids)
     file_ingested = crud.documents.get_by_filename(
         db, file_name=file_name)
     if file_ingested:
@@ -200,19 +200,23 @@ async def create_documents(
             status_code=409,
             detail="File already exists. Choose a different file.",
         )
-    docs_in = schemas.DocumentCreate(
-        filename=file_name, uploaded_by=current_user.id
+    docs_in = schemas.DocumentMakerCreate(
+        filename=file_name,
+        uploaded_by=current_user.id,
+        action_type=MakerCheckerActionType.INSERT,
+        status=MakerCheckerStatus.PENDING
     )
     document = crud.documents.create(db=db, obj_in=docs_in)
     department_ids = [int(number) for number in department_ids.split(",")]
     for department_id in department_ids:
         db.execute(models.document_department_association.insert().values(document_id=document.id, department_id=department_id))

     log_audit(
         model='Document',
         action='create',
         details={
-            'detail': f"{file_name} uploaded successfully",
-            'user': f"{current_user.fullname}",
+            'filename': f"{file_name}",
+            'user': f"{current_user.username}",
             'departments': f"{department_ids}"
         },
         user_id=current_user.id
@@ -275,7 +279,7 @@ async def common_ingest_logic(
         )

         logger.info(
-            f"{file_name} is uploaded by the {current_user.fullname}.")
+            f"{file_name} is uploaded by the {current_user.username}.")

         return ingested_documents

@@ -285,14 +289,46 @@ async def common_ingest_logic(

     except Exception as e:
         print(traceback.print_exc())
-        log_audit(model='Document', action='create',
-                  details={"status": 500, "detail": "Internal Server Error: Unable to ingest file.", }, user_id=current_user.id)
         raise HTTPException(
             status_code=500,
             detail="Internal Server Error: Unable to ingest file.",
         )


+async def ingest(request: Request, file_path: str) -> IngestResponse:
+    """Ingests and processes a file, storing its chunks to be used as context.
+
+    The context obtained from files is later used in
+    `/chat/completions`, `/completions`, and `/chunks` APIs.
+
+    Most common document
+    formats are supported, but you may be prompted to install an extra dependency to
+    manage a specific file type.
+
+    A file can generate different Documents (for example a PDF generates one Document
+    per page). All Documents IDs are returned in the response, together with the
+    extracted Metadata (which is later used to improve context retrieval). Those IDs
+    can be used to filter the context used to create responses in
+    `/chat/completions`, `/completions`, and `/chunks` APIs.
+    """
+    service = request.state.injector.get(IngestService)
+    try:
+        with open(file_path, 'rb') as file:
+            file_name = Path(file_path).name
+            upload_path = Path(f"{UPLOAD_DIR}/{file_name}")
+
+            with open(upload_path, "wb") as f:
+                f.write(file.file.read())
+            with open(upload_path, "rb") as f:
+                ingested_documents = service.ingest_bin_data(file.filename, f)
+    except Exception as e:
+        return {"message": f"There was an error uploading the file(s)\n {e}"}
+    finally:
+        file.file.close()
+    return IngestResponse(object="list", model="private-gpt", data=ingested_documents)
+
+
 @ingest_router.post("/ingest/file", response_model=IngestResponse, tags=["Ingestion"])
 async def ingest_file(
     request: Request,
@@ -329,7 +365,7 @@ async def ingest_file(
             with open(upload_path, "rb") as f:
                 ingested_documents = service.ingest_bin_data(original_filename, f)

-        logger.info(f"{original_filename} is uploaded by {current_user.fullname} in {departments.departments_ids}")
+        logger.info(f"{original_filename} is uploaded by {current_user.username} in {departments.departments_ids}")
         response = IngestResponse(
             object="list", model="private-gpt", data=ingested_documents
         )
@@ -341,15 +377,6 @@ async def ingest_file(

     except Exception as e:
         print(traceback.print_exc())
-        log_audit(
-            model="Document",
-            action="create",
-            details={
-                "status": 500,
-                "detail": "Internal Server Error: Unable to ingest file.",
-            },
-            user_id=current_user.id,
-        )
         logger.error(f"There was an error uploading the file(s): {str(e)}")
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
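The new module-level ingest(request, file_path) helper makes the ingestion path callable from other routers without going back through the HTTP endpoint; the documents router further down imports it together with create_documents for exactly that purpose. A minimal sketch of such a call site follows (the route name is hypothetical, and the await is an assumption: ingest is an async function, while the verify endpoint in this commit returns the coroutine without awaiting it):

# Sketch of reusing the ingestion helper from another router (illustrative only).
from pathlib import Path

from fastapi import APIRouter, Request

from private_gpt.constants import UNCHECKED_DIR
from private_gpt.server.ingest.ingest_router import ingest

router = APIRouter(prefix='/documents')

@router.post('/approve-and-ingest')  # hypothetical route, for illustration
async def approve_and_ingest(request: Request, filename: str):
    unchecked_path = Path(UNCHECKED_DIR) / filename
    # ingest() expects a path string and an injector-carrying Request.
    return await ingest(request, str(unchecked_path))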
@@ -1,9 +1,7 @@
 from typing import Any, List

 from sqlalchemy.orm import Session
-from fastapi.responses import JSONResponse
-from fastapi.encoders import jsonable_encoder
-from fastapi import APIRouter, Depends, HTTPException, status, Security, Request
+from fastapi import APIRouter, Depends, HTTPException, Security, Request

 from private_gpt.users.api import deps
 from private_gpt.users.constants.role import Role
@@ -15,7 +13,6 @@ router = APIRouter(prefix="/audit", tags=["Companies"])

 @router.get("", response_model=List[schemas.Audit])
 def list_auditlog(
-    request: Request,
     db: Session = Depends(deps.get_db),
     skip: int = 0,
     limit: int = 100,
@@ -30,7 +27,7 @@ def list_auditlog(
     def get_fullname(id):
         user = crud.user.get_by_id(db, id=id)
         if user:
-            return user.fullname
+            return user.username
         return ""

     logs = crud.audit.get_multi_desc(db, skip=skip, limit=limit)
@@ -52,7 +49,6 @@ def list_auditlog(

 @router.post("", response_model=schemas.Audit)
 def get_auditlog(
-    request: Request,
     audit: schemas.GetAudit,
     db: Session = Depends(deps.get_db),
     current_user: models.User = Security(
@@ -66,7 +62,7 @@ def get_auditlog(
     def get_fullname(id):
         user = crud.user.get_by_id(db, id=id)
         if user:
-            return user.fullname
+            return user.username
         return ""

     logs = crud.audit.get_by_id(db, id=audit.id)
@@ -37,7 +37,7 @@ def register_user(
     user_in = schemas.UserCreate(
         email=email,
         password=password,
-        fullname=fullname,
+        username=fullname,
         company_id=company.id,
         department_id=department.id,
     )
@@ -97,7 +97,7 @@ def ad_user_register(
     """
     Register a new user in the database. Company id is directly given here.
     """
-    user_in = schemas.UserCreate(email=email, password=password, fullname=fullname, company_id=1, department_id=department_id)
+    user_in = schemas.UserCreate(email=email, password=password, username=fullname, company_id=1, department_id=department_id)
     user = crud.user.create(db, obj_in=user_in)
     user_role_name = Role.GUEST["name"]
     company = crud.company.get(db, 1)
@@ -229,9 +229,7 @@ def register(
     db: Session = Depends(deps.get_db),
     email: str = Body(...),
     fullname: str = Body(...),
-    # password: str = Body(...),
+    password: str = Body(...),
-    company_id: int = Body(None, title="Company ID",
-                           description="Company ID for the user (if applicable)"),
     department_id: int = Body(None, title="Department ID",
                               description="Department name for the user (if applicable)"),
     role_name: str = Body(None, title="Role Name",
@@ -257,8 +255,11 @@ def register(
             status_code=409,
             detail="The user with this email already exists!",
         )
-    random_password = security.generate_random_password()
+    # random_password = security.generate_random_password()
+    random_password = password

     try:
+        company_id = current_user.company_id
         if company_id:
             company = crud.company.get(db, company_id)
             if not company:
@@ -280,20 +281,21 @@ def register(
             )
         user_role_name = role_name or Role.GUEST["name"]
         user_role = create_user_role(db, user, user_role_name, company)
-        log_audit(model='user_roles', action='creation',
+        log_audit(model='user_roles', action='create',
                   details={'detail': "User role created successfully.", }, user_id=current_user.id)

     except Exception as e:
         print(traceback.format_exc())
         raise HTTPException(
             status_code=500,
             detail="Unable to create account.",
         )

     token_payload = create_token_payload(user, user_role)
     response_dict = {
         "access_token": security.create_access_token(token_payload, expires_delta=timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)),
         "refresh_token": security.create_refresh_token(token_payload, expires_delta=timedelta(minutes=settings.REFRESH_TOKEN_EXPIRE_MINUTES)),
         "token_type": "bearer",
-        "password": random_password,
     }
     log_audit(model='User', action='creation',
               details={'detail': "User created successfully.",'username':fullname}, user_id=current_user.id)
@@ -165,22 +165,21 @@ def update_department(

         updated_department = crud.department.update(
             db=db, db_obj=department, obj_in=department_in)
-        updated_department = jsonable_encoder(updated_department)

         details = {
-            'detail': 'Department updated successfully!',
             'department_id': department.id,
-            'old_department_name': old_name,
-            'new_department_name': department.name,
+            'before': {
+                'name': old_name
+            },
+            'after': {
+                'name': department_in.name
+            }
         }

         log_audit_department(request, db, current_user, 'update', details)

         return JSONResponse(
             status_code=status.HTTP_200_OK,
             content={
                 "message": f"Department updated successfully",
-                "department": updated_department
             },
         )
     except Exception as e:
@@ -212,19 +211,15 @@ def delete_department(
             raise HTTPException(status_code=404, detail="Department not found")

         details = {
-            'detail': "Department deleted successfully!",
             'department_id': department.id,
             'department_name': department.name
         }
-        deleted_department = crud.department.remove(db=db, id=department_id)
+        crud.department.remove(db=db, id=department_id)
         log_audit_department(request, db, current_user, 'delete', details)

-        deleted_department = jsonable_encoder(deleted_department)
         return JSONResponse(
             status_code=status.HTTP_200_OK,
             content={
                 "message": "Department deleted successfully",
-                "department": deleted_department,
             },
         )
     except Exception as e:
@@ -1,18 +1,22 @@
-import traceback
+import os
 import logging
+import aiofiles
+import traceback
+from pathlib import Path
+from datetime import datetime

 from typing import Any, List
 from sqlalchemy.orm import Session
-from fastapi.responses import JSONResponse
-from fastapi.encoders import jsonable_encoder
-from fastapi import APIRouter, Depends, HTTPException, status, Security, Request
+from fastapi import APIRouter, Depends, HTTPException, status, Security, Request, File, UploadFile

 from private_gpt.users.api import deps
+from private_gpt.constants import UNCHECKED_DIR
 from private_gpt.users.constants.role import Role
 from private_gpt.users import crud, models, schemas
-from private_gpt.users.schemas import Document
+from private_gpt.server.ingest.ingest_router import create_documents, ingest
+from private_gpt.users.models.makerchecker import MakerCheckerActionType, MakerCheckerStatus

 logger = logging.getLogger(__name__)

 router = APIRouter(prefix='/documents', tags=['Documents'])


@@ -27,9 +31,12 @@ def list_files(
         scopes=[Role.ADMIN["name"], Role.SUPER_ADMIN["name"], Role.OPERATOR["name"]],
     )
 ):
+    """
+    List the documents based on the role.
+    """
     def get_username(db, id):
         user = crud.user.get_by_id(db=db, id=id)
-        return user.fullname
+        return user.username

     try:
         role = current_user.user_role.role.name if current_user.user_role else None
@@ -119,7 +126,6 @@ def list_files_by_department(
     )


-
 @router.post('/update', response_model=schemas.DocumentEnable)
 def update_document(
     request: Request,
@@ -131,6 +137,9 @@ def update_document(
         scopes=[Role.SUPER_ADMIN["name"], Role.OPERATOR["name"]],
     )
 ):
+    '''
+    Function to enable or disable document.
+    '''
     try:
         document = crud.documents.get_by_filename(
             db, file_name=document_in.filename)
@@ -161,7 +170,7 @@ def update_document(
 @router.post('/department_update', response_model=schemas.DocumentList)
 def update_department(
     request: Request,
-    document_in: schemas.DocumentDepartmentUpdate ,
+    document_in: schemas.DocumentDepartmentUpdate,
     db: Session = Depends(deps.get_db),
     log_audit: models.Audit = Depends(deps.get_audit_logger),
     current_user: models.User = Security(
@@ -182,7 +191,6 @@ def update_department(
             detail="Document with this filename doesn't exist!",
         )
         department_ids = [int(number) for number in document_in.departments]
-        print("Department update: ", document_in, department_ids)
         for department_id in department_ids:
             db.execute(models.document_department_association.insert().values(document_id=document.id, department_id=department_id))
         log_audit(
@@ -202,3 +210,120 @@ def update_department(
             detail="Internal Server Error.",
         )


+@router.post('/upload', response_model=schemas.Document)
+async def upload_documents(
+    request: Request,
+    departments: schemas.DocumentDepartmentList = Depends(),
+    file: UploadFile = File(...),
+    log_audit: models.Audit = Depends(deps.get_audit_logger),
+    db: Session = Depends(deps.get_db),
+    current_user: models.User = Security(
+        deps.get_current_user,
+        scopes=[Role.ADMIN["name"],
+                Role.SUPER_ADMIN["name"],
+                Role.OPERATOR["name"]],
+    )
+):
+    """Upload the documents."""
+    try:
+        original_filename = file.filename
+        if original_filename is None:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail="No file name provided",
+            )
+        upload_path = Path(f"{UNCHECKED_DIR}/{original_filename}")
+        try:
+            contents = await file.read()
+            async with aiofiles.open(upload_path, 'wb') as f:
+                await f.write(contents)
+        except Exception as e:
+            raise HTTPException(
+                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                detail="Internal Server Error: Unable to ingest file.",
+            )
+        document = await create_documents(db, original_filename, current_user, departments, log_audit)
+        logger.info(
+            f"{original_filename} is uploaded by {current_user.username} in {departments.departments_ids}")
+        return document
+
+    except HTTPException:
+        print(traceback.print_exc())
+        raise
+
+    except Exception as e:
+        print(traceback.print_exc())
+        logger.error(f"There was an error uploading the file(s): {str(e)}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Internal Server Error: Unable to upload file.",
+        )
+
+
+@router.post('/verify', response_model=schemas.Document)
+async def verify_documents(
+    request: Request,
+    checker_in: schemas.DocumentUpdate = Depends(),
+    log_audit: models.Audit = Depends(deps.get_audit_logger),
+    db: Session = Depends(deps.get_db),
+    current_user: models.User = Security(
+        deps.get_current_user,
+        scopes=[Role.ADMIN["name"],
+                Role.SUPER_ADMIN["name"],
+                Role.OPERATOR["name"]],
+    )
+):
+    """Upload the documents."""
+    try:
+        document = crud.documents.get_by_id(db, id=checker_in.id)
+        if not document:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail="Document not found!",
+            )
+        unchecked_path = Path(f"{UNCHECKED_DIR}/{document.filename}")
+        if checker_in.status == MakerCheckerStatus.APPROVED:
+            checker = schemas.DocumentCheckerUpdate(
+                status=MakerCheckerStatus.APPROVED,
+                is_enabled=checker_in.is_enabled,
+                verified_at=datetime.now(),
+                verified_by=current_user.id,
+            )
+            crud.documents.update(db=db, db_obj=document, obj_in=checker)
+
+            if document.doc_type_id == 2:
+                return ingest(request, unchecked_path)
+            elif document.doc_type_id == 3:
+                return ingest(request, unchecked_path)
+            else:
+                return ingest(request, unchecked_path)
+
+        elif checker_in.status == MakerCheckerStatus.REJECTED:
+            checker = schemas.DocumentCheckerUpdate(
+                status=MakerCheckerStatus.REJECTED,
+                is_enabled=checker_in.is_enabled,
+                verified_at=datetime.now(),
+                verified_by=current_user.id,
+            )
+            crud.documents.update(db=db, db_obj=document, obj_in=checker)
+            os.remove(unchecked_path)
+
+        else:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail="Cannot change status to PENDING!",
+            )
+
+    except HTTPException:
+        print(traceback.print_exc())
+        raise
+
+    except Exception as e:
+        print(traceback.print_exc())
+        logger.error(f"There was an error uploading the file(s): {str(e)}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Internal Server Error: Unable to upload file.",
+        )
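The two new endpoints split the maker and checker roles: POST /documents/upload stores the file under static/unchecked and records a PENDING document, while POST /documents/verify approves (and ingests) or rejects (and removes) it. A rough client-side sketch with httpx; the base URL, auth scheme, query-parameter names and response fields are assumptions, not taken from this commit:

# Illustrative client calls against the new endpoints (details are assumptions).
import httpx

BASE = "http://localhost:8001"                 # hypothetical server address
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical auth header

# Maker: upload a file for review; it is stored as PENDING under static/unchecked.
with open("policy.pdf", "rb") as fh:
    upload = httpx.post(
        f"{BASE}/documents/upload",
        params={"departments_ids": "1,2"},     # assumed parameter name
        files={"file": ("policy.pdf", fh, "application/pdf")},
        headers=HEADERS,
    )

# Checker: approve (triggers ingestion) or reject (removes the unchecked file).
verify = httpx.post(
    f"{BASE}/documents/verify",
    params={"id": upload.json()["id"], "status": "APPROVED"},  # assumed fields
    headers=HEADERS,
)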
@ -107,7 +107,7 @@ def create_user(
|
|||||||
'admin_id': current_user.id,
|
'admin_id': current_user.id,
|
||||||
'user_id': user.id,
|
'user_id': user.id,
|
||||||
'email': user.email,
|
'email': user.email,
|
||||||
'fullname': user.fullname,
|
'username': user.username,
|
||||||
'company_id': user.company_id,
|
'company_id': user.company_id,
|
||||||
'department_id': user.department_id,
|
'department_id': user.department_id,
|
||||||
}
|
}
|
||||||
@ -130,28 +130,23 @@ def update_username(
|
|||||||
"""
|
"""
|
||||||
Update own username.
|
Update own username.
|
||||||
"""
|
"""
|
||||||
user_in = schemas.UserUpdate(fullname=update_in.fullname, email=current_user.email, company_id=current_user.company_id)
|
user_in = schemas.UsernameUpdate(
|
||||||
old_fullname = current_user.fullname
|
username=update_in.username)
|
||||||
|
|
||||||
|
old_username = current_user.username
|
||||||
user = crud.user.update(db, db_obj=current_user, obj_in=user_in)
|
user = crud.user.update(db, db_obj=current_user, obj_in=user_in)
|
||||||
user_data = schemas.UserBaseSchema(
|
|
||||||
email=user.email,
|
|
||||||
fullname=user.fullname,
|
|
||||||
company_id=user.company_id,
|
|
||||||
department_id=user.department_id,
|
|
||||||
)
|
|
||||||
details = {
|
details = {
|
||||||
'old_fullname': old_fullname,
|
'before': old_username,
|
||||||
'new_fullname': user.fullname,
|
'after': user.username,
|
||||||
}
|
}
|
||||||
log_audit_user(request=request, db=db, current_user=current_user, action='update_username', details=details)
|
|
||||||
|
log_audit_user(request=request, db=db, current_user=current_user, action='update', details=details)
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
status_code=status.HTTP_200_OK,
|
status_code=status.HTTP_200_OK,
|
||||||
content={"message": "Username updated successfully",
|
content={"message": "Username updated successfully"},
|
||||||
"user": jsonable_encoder(user_data)},
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/me", response_model=schemas.User)
|
@router.get("/me", response_model=schemas.User)
|
||||||
def read_user_me(
|
def read_user_me(
|
||||||
db: Session = Depends(deps.get_db),
|
db: Session = Depends(deps.get_db),
|
||||||
@ -163,7 +158,7 @@ def read_user_me(
|
|||||||
role = current_user.user_role.role.name if current_user.user_role else None
|
role = current_user.user_role.role.name if current_user.user_role else None
|
||||||
user_data = schemas.Profile(
|
user_data = schemas.Profile(
|
||||||
email=current_user.email,
|
email=current_user.email,
|
||||||
fullname=current_user.fullname,
|
username=current_user.username,
|
||||||
company_id = current_user.company_id,
|
company_id = current_user.company_id,
|
||||||
department_id=current_user.department_id,
|
department_id=current_user.department_id,
|
||||||
role =role
|
role =role
|
||||||
@ -193,24 +188,14 @@ def change_password(
|
|||||||
current_user.hashed_password = new_password_hashed
|
current_user.hashed_password = new_password_hashed
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|
||||||
role = current_user.user_role.role.name if current_user.user_role else None
|
|
||||||
user_data = schemas.UserBaseSchema(
|
|
||||||
id=current_user.id,
|
|
||||||
email=current_user.email,
|
|
||||||
fullname=current_user.fullname,
|
|
||||||
company_id= current_user.company_id,
|
|
||||||
department_id=current_user.department_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
details = {
|
details = {
|
||||||
'detail': 'Password changed successfully!',
|
'detail': 'Password changed successfully!',
|
||||||
'user_id': current_user.id,
|
|
||||||
}
|
}
|
||||||
log_audit_user(request, db, current_user, 'change_password', details)
|
log_audit_user(request, db, current_user, 'update', details)
|
||||||
|
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
status_code=status.HTTP_200_OK,
|
status_code=status.HTTP_200_OK,
|
||||||
content={"message": "Password changed successfully", "user": jsonable_encoder(user_data)},
|
content={"message": "Password changed successfully"},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -235,49 +220,6 @@ def read_user_by_id(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@router.put("/{user_id}", response_model=schemas.User)
|
|
||||||
def update_user(
|
|
||||||
*,
|
|
||||||
request: Request,
|
|
||||||
db: Session = Depends(deps.get_db),
|
|
||||||
user_id: int,
|
|
||||||
user_in: schemas.UserUpdate,
|
|
||||||
current_user: models.User = Security(
|
|
||||||
deps.get_current_user,
|
|
||||||
scopes=[Role.ADMIN["name"], Role.SUPER_ADMIN["name"]],
|
|
||||||
),
|
|
||||||
) -> Any:
|
|
||||||
"""
|
|
||||||
Update a user.
|
|
||||||
"""
|
|
||||||
user = crud.user.get(db, id=user_id)
|
|
||||||
|
|
||||||
if not user:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_404_NOT_FOUND,
|
|
||||||
detail="The user with this id does not exist in the system",
|
|
||||||
)
|
|
||||||
user = crud.user.update(db, db_obj=user, obj_in=user_in)
|
|
||||||
user_data = schemas.UserBaseSchema(
|
|
||||||
id=user.id,
|
|
||||||
email=user.email,
|
|
||||||
fullname=user.fullname,
|
|
||||||
company_id=user.company_id,
|
|
||||||
department_id=user.department_id,
|
|
||||||
)
|
|
||||||
details = {
|
|
||||||
'user_id': user.id,
|
|
||||||
'email': user.email,
|
|
||||||
'fullname': user.fullname,
|
|
||||||
'company_id': user.company_id,
|
|
||||||
'department_id': user.department_id,
|
|
||||||
}
|
|
||||||
log_audit_user(request, db, current_user, 'update user', details)
|
|
||||||
return JSONResponse(
|
|
||||||
status_code=status.HTTP_200_OK,
|
|
||||||
content={"message": "User updated successfully", "user": jsonable_encoder(user_data)},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/")
|
@router.get("/")
|
||||||
def home_page(
|
def home_page(
|
||||||
@ -316,18 +258,9 @@ def admin_change_password(
|
|||||||
user.hashed_password = new_password_hashed
|
user.hashed_password = new_password_hashed
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|
||||||
user_data = schemas.UserBaseSchema(
|
|
||||||
id=user.id,
|
|
||||||
email=user.email,
|
|
||||||
fullname=user.fullname,
|
|
||||||
company_id=user.company_id,
|
|
||||||
department_id=user.department_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
status_code=status.HTTP_200_OK,
|
status_code=status.HTTP_200_OK,
|
||||||
content={"message": "User password changed successfully",
|
content={"message": "User password changed successfully"},
|
||||||
"user": jsonable_encoder(user_data)},
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -349,9 +282,8 @@ def delete_user(
    user = crud.user.get(db, id=user_id)

    details = {
-        'detail': "user deleted successfully!",
        'email': user.email,
-        'fullname': user.fullname,
+        'username': user.username,
        'department_id': user.department_id,
    }

@@ -378,10 +310,9 @@ def admin_update_user(
    ),
) -> Any:
    """
-    Update the user by the Admin/Super_ADMIN
+    Update the user by the Admin/Super_ADMIN/Operator
    """
    try:
-
        existing_user = crud.user.get_by_id(db, id=user_update.id)
        if existing_user is None:
            raise HTTPException(
@@ -389,13 +320,13 @@ def admin_update_user(
                detail=f"User not found with id: {user_update.id}",
            )
        old_detail = {
-            'fullname': existing_user.fullname,
+            'username': existing_user.username,
            'role': existing_user.user_role.role.name,
            'department': existing_user.department_id
        }
-        if not (existing_user.fullname == user_update.fullname):
-            fullname = crud.user.get_by_name(db, name=user_update.fullname)
-            if fullname:
+        if not (existing_user.username == user_update.username):
+            username = crud.user.get_by_name(db, name=user_update.username)
+            if username:
                raise HTTPException(
                    status_code=409,
                    detail="The user with this username already exists!",
@@ -414,18 +345,18 @@ def admin_update_user(
                role_id=role.id,
            )
            role = crud.user_role.update(db, db_obj=user_role, obj_in=role_in)
-        user_update_in = schemas.UserAdmin(fullname=user_update.fullname, department_id=user_update.department_id)
+        user_update_in = schemas.UserAdmin(username=user_update.username, department_id=user_update.department_id)

        new_detail = {
-            'fullname': user_update.fullname,
+            'username': user_update.username,
            'role': user_update.role,
            'department': user_update.department_id
        }
        details = {
-            'old detail': old_detail,
-            'new detail': new_detail,
+            'before': old_detail,
+            'after': new_detail,
        }
-        log_audit_user(request, db, current_user, 'admin_update', details)
+        log_audit_user(request, db, current_user, 'update', details)
        user = crud.user.get_by_id(db, id=existing_user.id)
        crud.user.update(db, db_obj=user, obj_in=user_update_in)

@@ -433,6 +364,7 @@ def admin_update_user(
            status_code=status.HTTP_200_OK,
            content={"message": "User updated successfully"}
        )
+
    except Exception as e:
        print(traceback.print_exc())
        return HTTPException(
private_gpt/users/constants/type.py (new file, 14 lines)
@@ -0,0 +1,14 @@
+class Type:
+    """
+    Constants for the various types for document types for uploading
+    """
+
+    REGULAR = {
+        "type": "REGULAR",
+    }
+    SCANNED = {
+        "type": "SCANNED",
+    }
+    BOTH = {
+        "type": "BOTH",
+    }
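A usage sketch (not part of this commit): the Type constants above mirror the rows the migration seeds into the document_type table, so a small helper along these lines could keep the two in sync. The DocumentType import path, session factory, and DSN are assumptions for illustration only.

# Hypothetical seeding helper: inserts any Type constant missing from document_type.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from private_gpt.users.constants.type import Type
from private_gpt.users.models.document import DocumentType  # module path assumed

engine = create_engine("postgresql://user:password@localhost:5432/QuickGpt")  # placeholder DSN
SessionLocal = sessionmaker(bind=engine)


def seed_document_types() -> None:
    """Create a row for each constant that is not already present."""
    with SessionLocal() as session:
        for constant in (Type.REGULAR, Type.SCANNED, Type.BOTH):
            name = constant["type"]
            if not session.query(DocumentType).filter_by(type=name).first():
                session.add(DocumentType(type=name))
        session.commit()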
@@ -1,7 +1,7 @@
 from datetime import datetime
 from sqlalchemy import Column, DateTime, ForeignKey, Integer, String
-from private_gpt.users.db.base_class import Base
 from sqlalchemy.dialects.postgresql import JSONB
+from private_gpt.users.db.base_class import Base


 class Audit(Base):
@@ -9,11 +9,12 @@ class Audit(Base):

     id = Column(Integer, primary_key=True, index=True)
     timestamp = Column(DateTime, nullable=False, default=datetime.utcnow)
-    user_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True)
-    model = Column(String, nullable=False)
-    action = Column(String, nullable=False)
+    user_id = Column(Integer, ForeignKey(
+        "users.id", ondelete="SET NULL"), nullable=True)
+    model = Column(String(100), nullable=False)
+    action = Column(String(50), nullable=False)
     details = Column(JSONB, nullable=True)
-    ip_address = Column(String, nullable=True)
+    ip_address = Column(String(45), nullable=True)

     def __repr__(self):
         return f"<Audit(id={self.id}, timestamp={self.timestamp}, user_id={self.user_id}, model={self.model}, action={self.action}, details={self.details})>"
@@ -1,13 +1,32 @@
 from datetime import datetime
-from sqlalchemy import Boolean, event, select, func, update, insert
-from sqlalchemy.orm import relationship, backref
+from sqlalchemy.orm import relationship
+from sqlalchemy import Boolean, event, select, func, update
 from sqlalchemy import Column, Integer, String, ForeignKey, DateTime

-from private_gpt.users.models.department import Department
-from private_gpt.users.models.makerchecker import MakerChecker
 from private_gpt.users.db.base_class import Base
+from private_gpt.users.models.department import Department
 from private_gpt.users.models.document_department import document_department_association
+from sqlalchemy import Enum
+from enum import Enum as PythonEnum
+
+
+class MakerCheckerStatus(PythonEnum):
+    PENDING = 'pending'
+    APPROVED = 'approved'
+    REJECTED = 'rejected'
+
+
+class MakerCheckerActionType(PythonEnum):
+    INSERT = 'insert'
+    UPDATE = 'update'
+    DELETE = 'delete'
+
+
+class DocumentType(Base):
+    """Models a document table"""
+    __tablename__ = "document_type"
+
+    id = Column(Integer, primary_key=True, index=True)
+    type = Column(String(225), nullable=False, unique=True)
+    documents = relationship("Document", back_populates='doc_type')


 class Document(Base):
@@ -29,20 +48,26 @@ class Document(Base):
     uploaded_by_user = relationship(
         "User", back_populates="uploaded_documents")
     is_enabled = Column(Boolean, default=True)
-    # Use document_department_association as the secondary for the relationship
-    verified = Column(Boolean, default=False)  # Added verified column
+    verified = Column(Boolean, default=False)
+
+    doc_type_id = Column(Integer, ForeignKey("document_type.id"))
+    doc_type = relationship("DocumentType", back_populates='documents')
+
+    action_type = Column(Enum(MakerCheckerActionType), nullable=False,
+                         default=MakerCheckerActionType.INSERT)  # 'insert' or 'update' or 'delete'
+    # 'pending', 'approved', or 'rejected'
+    status = Column(Enum(MakerCheckerStatus), nullable=False,
+                    default=MakerCheckerStatus.PENDING)
+
+    verified_at = Column(DateTime, nullable=True)
+    verified_by = Column(Integer, ForeignKey("users.id"), nullable=True)

     departments = relationship(
         "Department",
         secondary=document_department_association,
         back_populates="documents"
     )
-    # Relationship with MakerChecker
-    maker_checker_entry = relationship(
-        "MakerChecker",
-        backref=backref("document", uselist=False),
-        foreign_keys="[MakerChecker.record_id]",
-        primaryjoin="and_(MakerChecker.table_name=='document', MakerChecker.record_id==Document.id)",
-    )

 # Event listeners for updating total_documents in Department
 @event.listens_for(Document, 'after_insert')
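With the hunk above, the maker-checker state now lives directly on the Document row (action_type, status, verified_at, verified_by) instead of a separate MakerChecker table. A minimal sketch of how a checker-side approval might use these columns; the session handling and module path are assumed, not taken from this commit:

# Hypothetical approval helper for a pending document.
from datetime import datetime
from sqlalchemy.orm import Session

from private_gpt.users.models.document import (  # module path assumed
    Document,
    MakerCheckerStatus,
)


def approve_pending_document(db: Session, document_id: int, checker_id: int) -> Document:
    """Flip a pending document to approved and record who verified it."""
    doc = (
        db.query(Document)
        .filter(Document.id == document_id, Document.status == MakerCheckerStatus.PENDING)
        .one()
    )
    doc.status = MakerCheckerStatus.APPROVED
    doc.verified = True
    doc.verified_at = datetime.utcnow()
    doc.verified_by = checker_id
    db.commit()
    db.refresh(doc)
    return doc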
@@ -1,5 +1,5 @@
 from private_gpt.users.db.base_class import Base
-from sqlalchemy import Column, Integer, String, Table, ForeignKey
+from sqlalchemy import Column, Integer, Table, ForeignKey

 document_department_association = Table(
     "document_department_association",
@@ -1,34 +1,27 @@
-from datetime import datetime
-from sqlalchemy import Boolean, event, select, func, update
-from sqlalchemy.orm import relationship
-from sqlalchemy import Column, Integer, String, ForeignKey, DateTime
-from private_gpt.users.db.base_class import Base
-from sqlalchemy import Enum
-from enum import Enum as PythonEnum
+# from sqlalchemy import Enum
+# from datetime import datetime
+# from enum import Enum as PythonEnum
+# from private_gpt.users.db.base_class import Base
+# from sqlalchemy import Column, Integer, ForeignKey, DateTime


-class MakerCheckerStatus(PythonEnum):
-    PENDING = 'pending'
-    APPROVED = 'approved'
-    REJECTED = 'rejected'
+# class MakerCheckerStatus(PythonEnum):
+#     PENDING = 'pending'
+#     APPROVED = 'approved'
+#     REJECTED = 'rejected'


-class MakerCheckerActionType(PythonEnum):
-    INSERT = 'insert'
-    UPDATE = 'update'
+# class MakerCheckerActionType(PythonEnum):
+#     INSERT = 'insert'
+#     UPDATE = 'update'
+#     DELETE = 'delete'


-class MakerChecker(Base):
-    """Models a maker-checker table"""
-    __tablename__ = "maker_checker"
-
-    id = Column(Integer, primary_key=True, index=True)
-    table_name = Column(String(50), nullable=False)
-    record_id = Column(Integer, nullable=False)
-    action_type = Column(Enum(MakerCheckerActionType), nullable=False, default=MakerCheckerActionType.INSERT)  # 'insert' or 'update'
-    status = Column(Enum(MakerCheckerStatus), nullable=False, default=MakerCheckerStatus.PENDING)  # 'pending', 'approved', or 'rejected'
-    created_at = Column(DateTime, default=datetime.datetime.utcnow)
-    verified_at = Column(DateTime, nullable=True)
-    verified_by = Column(Integer, ForeignKey("users.id"), nullable=True)
-
-    def __repr__(self):
-        return f"<MakerChecker {self.table_name} {self.record_id}>"
+# class MakerChecker(Base):
+#     """Models a maker-checker base"""
+#     action_type = Column(Enum(MakerCheckerActionType), nullable=False, default=MakerCheckerActionType.INSERT) # 'insert' or 'update' or 'delete'
+#     status = Column(Enum(MakerCheckerStatus), nullable=False, default=MakerCheckerStatus.PENDING) # 'pending', 'approved', or 'rejected'
+#     verified_at = Column(DateTime, nullable=True)
+#     verified_by = Column(Integer, ForeignKey("users.id"), nullable=True)
@@ -1,7 +1,6 @@
-from sqlalchemy import Column, Integer, String, Boolean, Float, ForeignKey, DateTime
+from sqlalchemy import Column, Integer, ForeignKey, DateTime
 from sqlalchemy.orm import relationship
 from datetime import datetime, timedelta
-from fastapi import Depends
 from private_gpt.users.db.base_class import Base


@@ -11,8 +10,8 @@ class Subscription(Base):

     sub_id = Column(Integer, primary_key=True, index=True)
     company_id = Column(Integer, ForeignKey("companies.id"))
-    start_date = Column(DateTime, default=datetime.utcnow())
-    end_date = Column(DateTime, default=datetime.utcnow() + timedelta(days=30))  # Example: 30 days subscription period
+    start_date = Column(DateTime, default=datetime.utcnow)
+    end_date = Column(DateTime, default=lambda: datetime.utcnow() + timedelta(days=30))

     company = relationship("Company", back_populates="subscriptions")

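The start_date/end_date change above is worth a note: `default=datetime.utcnow()` is evaluated once at import time, so every row would receive the same stale timestamp, while passing a callable (`datetime.utcnow` or a lambda) lets SQLAlchemy evaluate the default per INSERT. A minimal illustration, independent of this commit and using assumed table/class names:

# Illustrative only: callable defaults are evaluated at flush time, not import time.
from datetime import datetime, timedelta
from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class SubscriptionExample(Base):
    __tablename__ = "subscription_example"
    sub_id = Column(Integer, primary_key=True)
    # Anti-pattern: datetime.utcnow() runs once when the module is imported.
    # stale_start = Column(DateTime, default=datetime.utcnow())
    # Correct: SQLAlchemy calls the callable for each new row.
    start_date = Column(DateTime, default=datetime.utcnow)
    end_date = Column(DateTime, default=lambda: datetime.utcnow() + timedelta(days=30))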
@@ -1,20 +1,18 @@
-import datetime
+from datetime import datetime
 from sqlalchemy import (
     Column,
     String,
     Integer,
     Boolean,
     UniqueConstraint,
-    PrimaryKeyConstraint,
-    DateTime,
-    ForeignKey
+    ForeignKey,
+    DateTime
 )
-from sqlalchemy.orm import relationship, backref
-from sqlalchemy import event, func, select, update, insert
+from sqlalchemy.orm import relationship
+from sqlalchemy import event, func, select, update

 from private_gpt.users.db.base_class import Base
 from private_gpt.users.models.department import Department
-from private_gpt.users.models.makerchecker import MakerChecker


 class User(Base):
@@ -24,19 +22,16 @@ class User(Base):

     email = Column(String(225), nullable=False, unique=True)
     hashed_password = Column(String, nullable=False)
-    fullname = Column(String(225), nullable=False, unique=True)
+    username = Column(String(225), nullable=False, unique=True)

-    UniqueConstraint("email", name="uq_user_email")
-    PrimaryKeyConstraint("id", name="pk_user_id")
-
     is_active = Column(Boolean, default=False)

-    last_login = Column(DateTime, nullable=True)
-    created_at = Column(DateTime, default=datetime.datetime.utcnow)
+    last_login = Column(DateTime, nullable=True, default=None)
+    created_at = Column(DateTime, default=datetime.now)
     updated_at = Column(
         DateTime,
-        default=datetime.datetime.utcnow,
-        onupdate=datetime.datetime.utcnow,
+        default=datetime.now,
+        onupdate=datetime.now,
     )

     password_created = Column(DateTime, nullable=True)
@@ -45,46 +40,27 @@ class User(Base):
     company_id = Column(Integer, ForeignKey("companies.id"), nullable=True)
     company = relationship("Company", back_populates="users")

-    uploaded_documents = relationship("Document", back_populates="uploaded_by_user")
+    uploaded_documents = relationship(
+        "Document", back_populates="uploaded_by_user")

     user_role = relationship(
         "UserRole", back_populates="user", uselist=False, cascade="all, delete-orphan")

-    department_id = Column(Integer, ForeignKey(
-        "departments.id"), nullable=False)
+    department_id = Column(
+        Integer, ForeignKey("departments.id"), nullable=False)

     department = relationship("Department", back_populates="users")

-    maker_checker_entry = relationship(
-        "MakerChecker",
-        backref=backref("user", uselist=False),
-        foreign_keys="[MakerChecker.record_id]",
-        primaryjoin="and_(MakerChecker.table_name=='users', MakerChecker.record_id==User.id)",
+    __table_args__ = (
+        UniqueConstraint('username', name='unique_username_no_spacing'),
     )

     def __repr__(self):
         """Returns string representation of model instance"""
-        return "<User {fullname!r}>".format(fullname=self.fullname)
-
-    __table_args__ = (
-        UniqueConstraint('fullname', name='unique_username_no_spacing'),
-    )
-
-
-@event.listens_for(User, 'after_insert')
-def create_maker_checker_entry(mapper, connection, target):
-    # Create a MakerChecker entry for the new User record
-    connection.execute(
-        insert(MakerChecker).values(
-            table_name='users',
-            record_id=target.id,
-            action_type='insert',
-            status='pending',
-            verified_at=None,
-            verified_by=None,
-        )
-    )
+        return "<User {username!r}>".format(username=self.username)


+# Event listeners
 @event.listens_for(User, 'after_insert')
 @event.listens_for(User, 'after_delete')
 def update_total_users(mapper, connection, target):
@@ -101,27 +77,13 @@ def update_total_users(mapper, connection, target):

 @event.listens_for(User, 'before_insert')
 def set_password_created(mapper, connection, target):
-    target.password_created = datetime.datetime.utcnow()
-    connection.execute(
-        update(User)
-        .values(password_created=datetime.datetime.utcnow())
-        .where(User.id == target.id)
-    )
+    target.password_created = datetime.utcnow()


 @event.listens_for(User, 'before_update', propagate=True)
 def check_password_expiry(mapper, connection, target):
     if target.password_created and (
-            datetime.datetime.utcnow() - target.password_created).days > 90:
+            datetime.now() - target.password_created).days > 90:
         target.is_active = False
-        connection.execute(
-            update(User)
-            .values(is_active=False)
-            .where(User.id == target.id)
-        )
     else:
-        connection.execute(
-            update(User)
-            .values(is_active=True)
-            .where(User.id == target.id)
-        )
+        target.is_active = True

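The simplified listeners above assign to `target` inside before_insert/before_update rather than issuing extra UPDATE statements against the same row during the flush; the assigned values ride along with the pending INSERT/UPDATE. (The hunk also appears to mix datetime.utcnow() and datetime.now(); the sketch below sticks to utcnow() throughout.) A self-contained sketch of the same pattern, with a stand-in model name that is assumed rather than taken from the repository:

# Minimal sketch of the before_insert / before_update listener pattern.
from datetime import datetime
from sqlalchemy import Boolean, Column, DateTime, Integer, event
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Account(Base):  # stand-in for the User model
    __tablename__ = "accounts"
    id = Column(Integer, primary_key=True)
    password_created = Column(DateTime, nullable=True)
    is_active = Column(Boolean, default=False)


@event.listens_for(Account, "before_insert")
def set_password_created(mapper, connection, target):
    # Assigning to the target is enough; the value is included in the pending INSERT.
    target.password_created = datetime.utcnow()


@event.listens_for(Account, "before_update", propagate=True)
def check_password_expiry(mapper, connection, target):
    # Deactivate accounts whose password is older than 90 days, reactivate otherwise.
    if target.password_created and (datetime.utcnow() - target.password_created).days > 90:
        target.is_active = False
    else:
        target.is_active = True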
@@ -1,9 +1,9 @@
 from .role import Role, RoleCreate, RoleInDB, RoleUpdate
 from .token import TokenSchema, TokenPayload
-from .user import User, UserCreate, UserInDB, UserUpdate, UserBaseSchema, Profile, UsernameUpdate, DeleteUser, UserAdminUpdate, UserAdmin, PasswordUpdate, SuperMakerUpdate
+from .user import User, UserCreate, UserInDB, UserUpdate, UserBaseSchema, Profile, UsernameUpdate, DeleteUser, UserAdminUpdate, UserAdmin, PasswordUpdate
 from .user_role import UserRole, UserRoleCreate, UserRoleInDB, UserRoleUpdate
 from .subscription import Subscription, SubscriptionBase, SubscriptionCreate, SubscriptionUpdate
 from .company import Company, CompanyBase, CompanyCreate, CompanyUpdate
-from .documents import Document, DocumentCreate, DocumentsBase, DocumentUpdate, DocumentList, DepartmentList, DocumentEnable, DocumentDepartmentUpdate
-from .department import Department, DepartmentCreate, DepartmentUpdate, DepartmentAdminCreate, DepartmentDelete, DocumentDepartmentList
+from .documents import Document, DocumentCreate, DocumentsBase, DocumentUpdate, DocumentList, DepartmentList, DocumentEnable, DocumentDepartmentUpdate, DocumentCheckerUpdate, DocumentMakerCreate, DocumentDepartmentList
+from .department import Department, DepartmentCreate, DepartmentUpdate, DepartmentAdminCreate, DepartmentDelete
 from .audit import AuditBase, AuditCreate, AuditUpdate, Audit, GetAudit
@@ -1,6 +1,6 @@
-from typing import List, Optional
-from pydantic import BaseModel, EmailStr
-from fastapi import Form
+from typing import Optional
+from pydantic import BaseModel


 class DepartmentBase(BaseModel):
@@ -51,5 +51,3 @@ class Department(DepartmentBase):
         orm_mode = True


-class DocumentDepartmentList(BaseModel):
-    departments_ids: str = Form(...)
@@ -1,6 +1,7 @@
 from pydantic import BaseModel
 from datetime import datetime
 from typing import List
+from fastapi import Form, UploadFile, File

 class DocumentsBase(BaseModel):
     filename: str
@@ -12,8 +13,10 @@ class DepartmentList(BaseModel):
 class DocumentCreate(DocumentsBase):
     uploaded_by: int

-class DocumentUpdate(DocumentsBase):
-    pass
+class DocumentUpdate(BaseModel):
+    id: int
+    status: str
+    is_enabled: bool

 class DocumentEnable(DocumentsBase):
     is_enabled: bool
@@ -38,3 +41,23 @@ class Document(BaseModel):

     class Config:
         orm_mode = True
+
+class DocumentMakerChecker(DocumentCreate):
+    action_type: str
+    status: str
+
+
+class DocumentMakerCreate(DocumentMakerChecker):
+    pass
+
+
+class DocumentCheckerUpdate(BaseModel):
+    status: str
+    is_enabled: bool
+    verified_at: datetime
+    verified_by: int
+
+
+class DocumentDepartmentList(BaseModel):
+    departments_ids: str = Form(...)
+    doc_type_id: int
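A quick sketch of how the new maker/checker schemas above might be populated on each side of the workflow; the field values and the import path are illustrative assumptions, not taken from the commit:

# Hypothetical payloads built with the new Pydantic schemas.
from datetime import datetime

from private_gpt.users import schemas  # import path assumed

# Maker side: an upload is recorded as a pending 'insert' action.
maker_entry = schemas.DocumentMakerCreate(
    filename="policy.pdf",
    uploaded_by=7,
    action_type="insert",
    status="pending",
)

# Checker side: approve the document and record who verified it and when.
checker_update = schemas.DocumentCheckerUpdate(
    status="approved",
    is_enabled=True,
    verified_at=datetime.utcnow(),
    verified_by=3,
)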
@@ -9,7 +9,7 @@ from private_gpt.users.schemas.company import Company

 class UserBaseSchema(BaseModel):
     email: EmailStr
-    fullname: str
+    username: str
     company_id: int
     department_id: int

@@ -22,7 +22,7 @@ class UserCreate(UserBaseSchema):


 class UsernameUpdate(BaseModel):
-    fullname: str
+    username: str


 class UserUpdate(BaseModel):
@@ -37,7 +37,6 @@ class UserLoginSchema(BaseModel):
     class Config:
         arbitrary_types_allowed = True

-
 class UserSchema(UserBaseSchema):
     id: int
     user_role: Optional[UserRole] = None
@@ -49,8 +48,6 @@ class UserSchema(UserBaseSchema):
     class Config:
         orm_mode = True

-
-
 class User(UserSchema):
     pass

@@ -69,17 +66,14 @@ class DeleteUser(BaseModel):

 class UserAdminUpdate(BaseModel):
     id: int
-    fullname: str
+    username: str
     role: str
     department_id: int

 class UserAdmin(BaseModel):
-    fullname: str
+    username: str
     department_id: int


 class PasswordUpdate(BaseModel):
     password_created: Optional[datetime] = None
-
-class SuperMakerUpdate(BaseModel):
-    checker: bool