Added users model for authentication with roles and user roles

This commit is contained in:
Saurab-Shrestha 2024-01-11 17:48:35 +05:45
parent 0a89d76cc5
commit b67b66cd44
53 changed files with 1955 additions and 43 deletions

15
.env Normal file
View File

@ -0,0 +1,15 @@
PORT=8000
ENVIRONMENT=dev
DB_HOST=localhost
DB_USER=postgres
DB_PASSWORD=quick
DB_NAME=QuickGpt
SUPER_ADMIN_EMAIL=superadmin@email.com
SUPER_ADMIN_PASSWORD=supersecretpassword
SUPER_ADMIN_ACCOUNT_NAME=superaccount
# NOTE: do not commit real secrets to version control; override SECRET_KEY via the environment or a secrets manager.
SECRET_KEY=ba9dc3f976cf8fb40519dcd152a8d7d21c0b7861d841711cdb2602be8e85fd7c
ACCESS_TOKEN_EXPIRE_MINUTES=60
REFRESH_TOKEN_EXPIRE_MINUTES=120 # 2 hours

0
=1.4, Normal file
View File

0
=1.9.1 Normal file
View File

116
alembic.ini Normal file
View File

@ -0,0 +1,116 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
alembic/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

85
alembic/env.py Normal file
View File

@ -0,0 +1,85 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from private_gpt.users.db.base_class import Base
from private_gpt.users.core.config import SQLALCHEMY_DATABASE_URI
from private_gpt.users.models.user import User
from private_gpt.users.models.role import Role
from private_gpt.users.models.user_role import UserRole
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
# Override the placeholder sqlalchemy.url from alembic.ini with the
# application's configured database URI so migrations target the real DB.
config.set_section_option(config.config_ini_section, "sqlalchemy.url", SQLALCHEMY_DATABASE_URI)
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL (no Engine),
    so no DBAPI needs to be installed; calls to context.execute() emit
    the generated SQL to the script output instead of a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] ini section and applies the
    migrations over a live connection inside one transaction.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic sets offline mode for `alembic upgrade --sql`;
# otherwise migrations run against a live database connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
alembic/script.py.mako Normal file
View File

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,61 @@
"""Create user model, role model and user role model
Revision ID: 19e9eccf2c81
Revises:
Create Date: 2024-01-11 16:33:53.253969
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '19e9eccf2c81'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the roles, users and user_roles tables with their indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('roles',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=100), nullable=True),
    sa.Column('description', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_roles_id'), 'roles', ['id'], unique=False)
    op.create_index(op.f('ix_roles_name'), 'roles', ['name'], unique=False)
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(length=225), nullable=False),
    # Password hash is stored as raw bytes (e.g. bcrypt digest).
    sa.Column('hashed_password', sa.LargeBinary(), nullable=False),
    sa.Column('fullname', sa.String(length=225), nullable=False),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('last_login', sa.DateTime(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email')
    )
    # Association table; the composite PK plus the named unique constraint
    # both enforce one row per (user, role) pair.
    op.create_table('user_roles',
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('role_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('user_id', 'role_id'),
    sa.UniqueConstraint('user_id', 'role_id', name='unique_user_role')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the tables in reverse dependency order (children first)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('user_roles')
    op.drop_table('users')
    op.drop_index(op.f('ix_roles_name'), table_name='roles')
    op.drop_index(op.f('ix_roles_id'), table_name='roles')
    op.drop_table('roles')
    # ### end Alembic commands ###

View File

@ -0,0 +1,30 @@
"""Create user model
Revision ID: 9491fd6d6fad
Revises: 19e9eccf2c81
Create Date: 2024-01-11 17:02:25.882848
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '9491fd6d6fad'
down_revision: Union[str, None] = '19e9eccf2c81'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add the unique (user_id, role_id) constraint on user_roles."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): revision 19e9eccf2c81 already creates the
    # 'unique_user_role' constraint inline with the user_roles table, so
    # applying both revisions to a fresh database will fail with a
    # duplicate-constraint error — confirm and drop one of the two.
    op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id'])
    # ### end Alembic commands ###
def downgrade() -> None:
    """Remove the unique (user_id, role_id) constraint again."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
    # ### end Alembic commands ###

7
alembic_readme.md Normal file
View File

@ -0,0 +1,7 @@
## **Alembic migrations:**
`alembic init alembic` # initialize the alembic
`alembic revision --autogenerate -m "Create user model"` # first migration
`alembic upgrade 66b63a` # apply the migration to the database (here `66b63a` is the revision id shown by `alembic history`)

116
private_gpt/alembic.ini Normal file
View File

@ -0,0 +1,116 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -11,6 +11,8 @@ from private_gpt.server.completions.completions_router import completions_router
from private_gpt.server.embeddings.embeddings_router import embeddings_router
from private_gpt.server.health.health_router import health_router
from private_gpt.server.ingest.ingest_router import ingest_router
from private_gpt.users.api.v1.api import api_router
from private_gpt.settings.settings import Settings
logger = logging.getLogger(__name__)
@ -30,6 +32,9 @@ def create_app(root_injector: Injector) -> FastAPI:
app.include_router(ingest_router)
app.include_router(embeddings_router)
app.include_router(health_router)
app.include_router(api_router)
settings = root_injector.get(Settings)
if settings.server.cors.enabled:
@ -45,9 +50,12 @@ def create_app(root_injector: Injector) -> FastAPI:
if settings.ui.enabled:
logger.debug("Importing the UI module")
from private_gpt.ui.ui import PrivateGptUi
from private_gpt.ui.admin_ui import PrivateAdminGptUi
admin_ui = root_injector.get(PrivateAdminGptUi)
admin_ui.mount_in_admin_app(app, '/admin')
from private_gpt.ui.ui import PrivateGptUi
ui = root_injector.get(PrivateGptUi)
ui.mount_in_app(app, settings.ui.path)
return app
return app

View File

@ -7,6 +7,7 @@ from private_gpt.settings.settings import settings
def _absolute_or_from_project_root(path: str) -> Path:
if path.startswith("/"):
return Path(path)
return PROJECT_ROOT_PATH / path

289
private_gpt/ui/admin_ui.py Normal file
View File

@ -0,0 +1,289 @@
"""This file should be imported only and only if you want to run the UI locally."""
import itertools
import logging
from collections.abc import Iterable
from pathlib import Path
from typing import Any
import gradio as gr # type: ignore
from fastapi import FastAPI
from gradio.themes.utils.colors import slate # type: ignore
from injector import inject, singleton
from llama_index.llms import ChatMessage, ChatResponse, MessageRole
from pydantic import BaseModel
from private_gpt.constants import PROJECT_ROOT_PATH
from private_gpt.di import global_injector
from private_gpt.server.chat.chat_service import ChatService, CompletionGen
from private_gpt.server.chunks.chunks_service import Chunk, ChunksService
from private_gpt.server.ingest.ingest_service import IngestService
from private_gpt.settings.settings import settings
from private_gpt.ui.images import logo_svg
from private_gpt.ui.common import Source
logger = logging.getLogger(__name__)
THIS_DIRECTORY_RELATIVE = Path(__file__).parent.relative_to(PROJECT_ROOT_PATH)
# Should be "private_gpt/ui/avatar-bot.ico"
AVATAR_BOT = THIS_DIRECTORY_RELATIVE / "avatar-bot.ico"
UI_TAB_TITLE = "My Private GPT"
SOURCES_SEPARATOR = "\n\n Sources: \n"
MODES = ["Query Docs", "Search in Docs", "LLM Chat"]
@singleton
class PrivateAdminGptUi:
    """Gradio-based admin UI: streaming chat/search over ingested documents.

    Mirrors the regular PrivateGptUi but keeps the file-upload controls;
    instances are mounted under the admin path via mount_in_admin_app().
    """

    @inject
    def __init__(
        self,
        ingest_service: IngestService,
        chat_service: ChatService,
        chunks_service: ChunksService,
    ) -> None:
        self._ingest_service = ingest_service
        self._chat_service = chat_service
        self._chunks_service = chunks_service
        # Cache the UI blocks
        self._ui_block = None
        # Initialize system prompt based on default mode
        self.mode = MODES[0]
        self._system_prompt = self._get_default_system_prompt(self.mode)

    def _chat(self, message: str, history: list[list[str]], mode: str, *_: Any) -> Any:
        """Stream an answer for *message* according to *mode*.

        Yields progressively longer strings so Gradio renders a streaming
        response; in context modes the final yield appends the curated
        source list after SOURCES_SEPARATOR.
        """
        def yield_deltas(completion_gen: CompletionGen) -> Iterable[str]:
            # Accumulate deltas into one growing string and re-yield it
            # after every chunk (Gradio replaces the whole message).
            full_response: str = ""
            stream = completion_gen.response
            for delta in stream:
                if isinstance(delta, str):
                    full_response += str(delta)
                elif isinstance(delta, ChatResponse):
                    full_response += delta.delta or ""
                yield full_response
            if completion_gen.sources:
                full_response += SOURCES_SEPARATOR
                cur_sources = Source.curate_sources(completion_gen.sources)
                sources_text = "\n\n\n".join(
                    f"{index}. {source.file} (page {source.page})"
                    for index, source in enumerate(cur_sources, start=1)
                )
                full_response += sources_text
                yield full_response

        def build_history() -> list[ChatMessage]:
            # Flatten [user, assistant] pairs into a single message list.
            history_messages: list[ChatMessage] = list(
                itertools.chain(
                    *[
                        [
                            ChatMessage(content=interaction[0], role=MessageRole.USER),
                            ChatMessage(
                                # Remove from history content the Sources information
                                content=interaction[1].split(SOURCES_SEPARATOR)[0],
                                role=MessageRole.ASSISTANT,
                            ),
                        ]
                        for interaction in history
                    ]
                )
            )
            # max 20 messages to try to avoid context overflow
            return history_messages[:20]

        new_message = ChatMessage(content=message, role=MessageRole.USER)
        all_messages = [*build_history(), new_message]
        # If a system prompt is set, add it as a system message
        if self._system_prompt:
            all_messages.insert(
                0,
                ChatMessage(
                    content=self._system_prompt,
                    role=MessageRole.SYSTEM,
                ),
            )
        match mode:
            case "Query Docs":
                # Chat with retrieval context from the ingested documents.
                query_stream = self._chat_service.stream_chat(
                    messages=all_messages,
                    use_context=True,
                )
                yield from yield_deltas(query_stream)
            case "LLM Chat":
                # Plain chat, no retrieval context.
                llm_stream = self._chat_service.stream_chat(
                    messages=all_messages,
                    use_context=False,
                )
                yield from yield_deltas(llm_stream)
            case "Search in Docs":
                # No LLM call: return the top matching chunks directly.
                response = self._chunks_service.retrieve_relevant(
                    text=message, limit=4, prev_next_chunks=0
                )
                sources = Source.curate_sources(response)
                yield "\n\n\n".join(
                    f"{index}. **{source.file} "
                    f"(page {source.page})**\n "
                    f"{source.text}"
                    for index, source in enumerate(sources, start=1)
                )

    # On initialization and on mode change, this function set the system prompt
    # to the default prompt based on the mode (and user settings).
    @staticmethod
    def _get_default_system_prompt(mode: str) -> str:
        """Return the settings-defined default system prompt for *mode*."""
        p = ""
        match mode:
            # For query chat mode, obtain default system prompt from settings
            case "Query Docs":
                p = settings().ui.default_query_system_prompt
            # For chat mode, obtain default system prompt from settings
            case "LLM Chat":
                p = settings().ui.default_chat_system_prompt
            # For any other mode, clear the system prompt
            case _:
                p = ""
        return p

    def _set_system_prompt(self, system_prompt_input: str) -> None:
        """Store the prompt later inserted as the SYSTEM message in _chat."""
        logger.info(f"Setting system prompt to: {system_prompt_input}")
        self._system_prompt = system_prompt_input

    def _set_current_mode(self, mode: str) -> Any:
        """Switch mode, reset the default prompt, and update the prompt textbox."""
        self.mode = mode
        self._set_system_prompt(self._get_default_system_prompt(mode))
        # Update placeholder and allow interaction if default system prompt is set
        if self._system_prompt:
            return gr.update(placeholder=self._system_prompt, interactive=True)
        # Update placeholder and disable interaction if no default system prompt is set
        else:
            return gr.update(placeholder=self._system_prompt, interactive=False)

    def _list_ingested_files(self) -> list[list[str]]:
        """Return ingested file names as single-cell rows for the gr.List widget."""
        files = set()
        for ingested_document in self._ingest_service.list_ingested():
            if ingested_document.doc_metadata is None:
                # Skipping documents without metadata
                continue
            file_name = ingested_document.doc_metadata.get(
                "file_name", "[FILE NAME MISSING]"
            )
            files.add(file_name)
        return [[row] for row in files]

    def _upload_file(self, files: list[str]) -> None:
        """Bulk-ingest the uploaded files, keyed by their base names."""
        logger.debug("Loading count=%s files", len(files))
        paths = [Path(file) for file in files]
        self._ingest_service.bulk_ingest([(str(path.name), path) for path in paths])

    def _build_admin_ui_blocks(self) -> gr.Blocks:
        """Assemble the admin layout: controls/upload column plus chat column."""
        logger.debug("Creating the UI blocks")
        with gr.Blocks(
            title=UI_TAB_TITLE,
            theme=gr.themes.Soft(primary_hue=slate),
            css=".logo { "
            "display:flex;"
            "background-color: #C7BAFF;"
            "height: 80px;"
            "border-radius: 8px;"
            "align-content: center;"
            "justify-content: center;"
            "align-items: center;"
            "}"
            ".logo img { height: 25% }"
            ".contain { display: flex !important; flex-direction: column !important; }"
            "#component-0, #component-3, #component-10, #component-8 { height: 100% !important; }"
            "#chatbot { flex-grow: 1 !important; overflow: auto !important;}"
            "#col { height: calc(100vh - 112px - 16px) !important; }",
        ) as blocks:
            with gr.Row():
                gr.HTML(f"<div class='logo'/><img src={logo_svg} alt=PrivateGPT></div")
            with gr.Row(equal_height=False):
                with gr.Column(scale=3):
                    mode = gr.Radio(
                        MODES,
                        label="Mode",
                        value="Query Docs",
                    )
                    upload_button = gr.components.UploadButton(
                        "Upload File(s)",
                        type="filepath",
                        file_count="multiple",
                        size="sm",
                    )
                    ingested_dataset = gr.List(
                        self._list_ingested_files,
                        headers=["File name"],
                        label="Ingested Files",
                        interactive=False,
                        render=False,  # Rendered under the button
                    )
                    upload_button.upload(
                        self._upload_file,
                        inputs=upload_button,
                        outputs=ingested_dataset,
                    )
                    ingested_dataset.change(
                        self._list_ingested_files,
                        outputs=ingested_dataset,
                    )
                    ingested_dataset.render()
                    system_prompt_input = gr.Textbox(
                        placeholder=self._system_prompt,
                        label="System Prompt",
                        lines=2,
                        interactive=True,
                        render=False,
                    )
                    # When mode changes, set default system prompt
                    mode.change(
                        self._set_current_mode, inputs=mode, outputs=system_prompt_input
                    )
                    # On blur, set system prompt to use in queries
                    system_prompt_input.blur(
                        self._set_system_prompt,
                        inputs=system_prompt_input,
                    )
                with gr.Column(scale=7, elem_id="col"):
                    _ = gr.ChatInterface(
                        self._chat,
                        chatbot=gr.Chatbot(
                            label=f"LLM: {settings().llm.mode}",
                            show_copy_button=True,
                            elem_id="chatbot",
                            render=False,
                            avatar_images=(
                                None,
                                AVATAR_BOT,
                            ),
                        ),
                        additional_inputs=[mode, upload_button, system_prompt_input],
                    )
        return blocks

    def get_admin_ui_blocks(self) -> gr.Blocks:
        """Build the blocks once and cache them on the instance."""
        if self._ui_block is None:
            self._ui_block = self._build_admin_ui_blocks()
        return self._ui_block

    def mount_in_admin_app(self, app: FastAPI, path: str) -> None:
        """Mount the queued admin Gradio app onto *app* at *path*."""
        logger.info("PATH---------------------------->:%s", path)
        blocks = self.get_admin_ui_blocks()
        blocks.queue()
        logger.info("Mounting the admin gradio UI at path=%s", path)
        gr.mount_gradio_app(app, blocks, path=path)
# Manual launch for local debugging: build the admin UI and serve it directly.
if __name__ == "__main__":
    ui = global_injector.get(PrivateAdminGptUi)
    _blocks = ui.get_admin_ui_blocks()
    _blocks.queue()
    _blocks.launch(debug=False, show_api=False)

25
private_gpt/ui/common.py Normal file
View File

@ -0,0 +1,25 @@
from pydantic import BaseModel
from private_gpt.server.chunks.chunks_service import Chunk, ChunksService
class Source(BaseModel):
    """Immutable reference to a retrieved chunk: file name, page label, text."""

    file: str
    page: str
    text: str

    class Config:
        # Frozen so instances are hashable and can be collected into a set.
        frozen = True

    @staticmethod
    def curate_sources(sources: list[Chunk]) -> set["Source"]:
        """Deduplicate chunks into a set of Source entries ("-" when metadata is missing)."""
        unique: set["Source"] = set()
        for item in sources:
            meta = item.document.doc_metadata or {}
            unique.add(
                Source(
                    file=meta.get("file_name", "-"),
                    page=meta.get("page_label", "-"),
                    text=item.text,
                )
            )
        return unique

View File

@ -0,0 +1 @@
tmp lock file

View File

@ -0,0 +1 @@
{"collections": {}, "aliases": {}}

View File

@ -10,7 +10,6 @@ from fastapi import FastAPI
from gradio.themes.utils.colors import slate # type: ignore
from injector import inject, singleton
from llama_index.llms import ChatMessage, ChatResponse, MessageRole
from pydantic import BaseModel
from private_gpt.constants import PROJECT_ROOT_PATH
from private_gpt.di import global_injector
@ -19,6 +18,7 @@ from private_gpt.server.chunks.chunks_service import Chunk, ChunksService
from private_gpt.server.ingest.ingest_service import IngestService
from private_gpt.settings.settings import settings
from private_gpt.ui.images import logo_svg
from private_gpt.ui.common import Source
logger = logging.getLogger(__name__)
@ -33,29 +33,6 @@ SOURCES_SEPARATOR = "\n\n Sources: \n"
MODES = ["Query Docs", "Search in Docs", "LLM Chat"]
class Source(BaseModel):
file: str
page: str
text: str
class Config:
frozen = True
@staticmethod
def curate_sources(sources: list[Chunk]) -> set["Source"]:
curated_sources = set()
for chunk in sources:
doc_metadata = chunk.document.doc_metadata
file_name = doc_metadata.get("file_name", "-") if doc_metadata else "-"
page_label = doc_metadata.get("page_label", "-") if doc_metadata else "-"
source = Source(file=file_name, page=page_label, text=chunk.text)
curated_sources.add(source)
return curated_sources
@singleton
class PrivateGptUi:
@ -224,7 +201,7 @@ class PrivateGptUi:
"#component-0, #component-3, #component-10, #component-8 { height: 100% !important; }"
"#chatbot { flex-grow: 1 !important; overflow: auto !important;}"
"#col { height: calc(100vh - 112px - 16px) !important; }",
) as blocks:
) as users:
with gr.Row():
gr.HTML(f"<div class='logo'/><img src={logo_svg} alt=PrivateGPT></div")
@ -235,12 +212,6 @@ class PrivateGptUi:
label="Mode",
value="Query Docs",
)
upload_button = gr.components.UploadButton(
"Upload File(s)",
type="filepath",
file_count="multiple",
size="sm",
)
ingested_dataset = gr.List(
self._list_ingested_files,
headers=["File name"],
@ -248,11 +219,6 @@ class PrivateGptUi:
interactive=False,
render=False, # Rendered under the button
)
upload_button.upload(
self._upload_file,
inputs=upload_button,
outputs=ingested_dataset,
)
ingested_dataset.change(
self._list_ingested_files,
outputs=ingested_dataset,
@ -288,9 +254,9 @@ class PrivateGptUi:
AVATAR_BOT,
),
),
additional_inputs=[mode, upload_button, system_prompt_input],
additional_inputs=[mode, system_prompt_input],
)
return blocks
return users
def get_ui_blocks(self) -> gr.Blocks:
if self._ui_block is None:
@ -298,14 +264,17 @@ class PrivateGptUi:
return self._ui_block
def mount_in_app(self, app: FastAPI, path: str) -> None:
logger.info("PATH---------------------------->:%s", path)
blocks = self.get_ui_blocks()
blocks.queue()
logger.info("Mounting the gradio UI, at path=%s", path)
logger.info("Mounting the regular gradio UI at path=%s", path)
gr.mount_gradio_app(app, blocks, path=path)
if __name__ == "__main__":
ui = global_injector.get(PrivateGptUi)
_blocks = ui.get_ui_blocks()
_blocks = ui.get_admin_ui_blocks()
_blocks.queue()
_blocks.launch(debug=False, show_api=False)

141
private_gpt/ui/users_ui.py Normal file
View File

@ -0,0 +1,141 @@
"""This file should be imported only and only if you want to run the UI locally."""
import itertools
import logging
from collections.abc import Iterable
from pathlib import Path
from typing import Any
import gradio as gr # type: ignore
from fastapi import FastAPI
from gradio.themes.utils.colors import slate # type: ignore
from injector import inject, singleton
from llama_index.llms import ChatMessage, ChatResponse, MessageRole
from pydantic import BaseModel
from private_gpt.constants import PROJECT_ROOT_PATH
from private_gpt.di import global_injector
from private_gpt.server.chat.chat_service import ChatService, CompletionGen
from private_gpt.server.chunks.chunks_service import Chunk, ChunksService
from private_gpt.server.ingest.ingest_service import IngestService
from private_gpt.settings.settings import settings
from private_gpt.ui.images import logo_svg
logger = logging.getLogger(__name__)
THIS_DIRECTORY_RELATIVE = Path(__file__).parent.relative_to(PROJECT_ROOT_PATH)
# Should be "private_gpt/ui/avatar-bot.ico"
AVATAR_BOT = THIS_DIRECTORY_RELATIVE / "avatar-bot.ico"
UI_TAB_TITLE = "My Private GPT"
SOURCES_SEPARATOR = "\n\n Sources: \n"
MODES = ["Query Docs", "Search in Docs", "LLM Chat"]
from private_gpt.ui.common import PrivateGpt
@singleton
class UsersUI(PrivateGpt):
    """Gradio UI for regular users: like the admin UI but without upload.

    NOTE(review): ``PrivateGpt`` is imported from private_gpt.ui.common,
    but the common.py added in this commit defines only ``Source`` —
    confirm the base class exists or this import fails at startup.
    """

    def __init__(
        self,
        ingest_service: IngestService,
        chat_service: ChatService,
        chunks_service: ChunksService,
    ) -> None:
        super().__init__(ingest_service, chat_service, chunks_service)

    def _build_ui_blocks(self) -> gr.Blocks:
        """Assemble the user-facing layout (no upload button; logo row disabled)."""
        logger.debug("Creating the UI blocks")
        with gr.Blocks(
            title=UI_TAB_TITLE,
            theme=gr.themes.Soft(primary_hue=slate),
            css=".logo { "
            "display:flex;"
            "background-color: #C7BAFF;"
            "height: 80px;"
            "border-radius: 8px;"
            "align-content: center;"
            "justify-content: center;"
            "align-items: center;"
            "}"
            ".logo img { height: 25% }"
            ".contain { display: flex !important; flex-direction: column !important; }"
            "#component-0, #component-3, #component-10, #component-8 { height: 100% !important; }"
            "#chatbot { flex-grow: 1 !important; overflow: auto !important;}"
            "#col { height: calc(100vh - 112px - 16px) !important; }",
        ) as users:
            # with gr.Row():
            #     gr.HTML(f"<div class='logo'/><img src={logo_svg} alt=PrivateGPT></div")
            with gr.Row(equal_height=False):
                with gr.Column(scale=3):
                    mode = gr.Radio(
                        MODES,
                        label="Mode",
                        value="Query Docs",
                    )
                    ingested_dataset = gr.List(
                        self._list_ingested_files,
                        headers=["File name"],
                        label="Ingested Files",
                        interactive=False,
                        render=False,  # Rendered under the button
                    )
                    ingested_dataset.change(
                        self._list_ingested_files,
                        outputs=ingested_dataset,
                    )
                    ingested_dataset.render()
                    system_prompt_input = gr.Textbox(
                        placeholder=self._system_prompt,
                        label="System Prompt",
                        lines=2,
                        interactive=True,
                        render=False,
                    )
                    # When mode changes, set default system prompt
                    mode.change(
                        self._set_current_mode, inputs=mode, outputs=system_prompt_input
                    )
                    # On blur, set system prompt to use in queries
                    system_prompt_input.blur(
                        self._set_system_prompt,
                        inputs=system_prompt_input,
                    )
                with gr.Column(scale=7, elem_id="col"):
                    _ = gr.ChatInterface(
                        self._chat,
                        chatbot=gr.Chatbot(
                            label=f"LLM: {settings().llm.mode}",
                            show_copy_button=True,
                            elem_id="chatbot",
                            render=False,
                            # avatar_images=(
                            #     None,
                            #     AVATAR_BOT,
                            # ),
                        ),
                        additional_inputs=[mode, system_prompt_input],
                    )
        return users

    def get_ui_blocks(self) -> gr.Blocks:
        """Build the blocks once and cache them on the instance."""
        if self._ui_block is None:
            self._ui_block = self._build_ui_blocks()
        return self._ui_block

    def mount_in_app(self, app: FastAPI, path: str) -> None:
        """Mount the queued Gradio app onto *app* at *path*."""
        logger.info("PATH---------------------------->:%s", path)
        blocks = self.get_ui_blocks()
        blocks.queue()
        logger.info("Mounting the regular gradio UI at path=%s", path)
        gr.mount_gradio_app(app, blocks, path=path)
# Manual launch for local debugging: build the users UI and serve it directly.
if __name__ == "__main__":
    ui = global_injector.get(UsersUI)
    _blocks = ui.get_ui_blocks()
    _blocks.queue()
    _blocks.launch(debug=False, show_api=False)

View File

View File

View File

@ -0,0 +1,68 @@
from typing import Union, Any, Generator
from datetime import datetime
from private_gpt.users import crud, models, schemas
from private_gpt.users.db.session import SessionLocal
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from private_gpt.users.core.security import (
ALGORITHM,
JWT_SECRET_KEY
)
from fastapi import Depends, HTTPException, Security, status
from jose import jwt
from pydantic import ValidationError
from private_gpt.users.schemas.token import TokenPayload
from sqlalchemy.orm import Session
# OAuth2 bearer-token scheme; clients obtain their JWT from the /login endpoint.
reuseable_oauth = OAuth2PasswordBearer(
    tokenUrl="/login",
    scheme_name="JWT"
)
def get_db() -> Generator:
    """Yield a SQLAlchemy session and always close it afterwards.

    The session is created *before* the try block: previously
    ``SessionLocal()`` sat inside it, so a failure while constructing the
    session would reach ``finally`` with ``db`` unbound and raise an
    UnboundLocalError that masks the original error.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
async def get_current_user(
    db: Session = Depends(get_db),
    token: str = Depends(reuseable_oauth)
) -> models.User:
    """Decode the bearer JWT and return the matching user record.

    Raises 401 when the token is expired, 403 when it cannot be
    validated, and 404 when the user id it carries does not exist.
    """
    try:
        payload = jwt.decode(
            token, JWT_SECRET_KEY, algorithms=[ALGORITHM]
        )
        # NOTE(review): requires TokenPayload to declare an `exp` field —
        # confirm the schema includes it, otherwise this attribute access fails.
        token_data = TokenPayload(**payload)
        # Belt-and-braces expiry check; jose also verifies "exp" during
        # decode, so an expired token may already raise JWTError above.
        if datetime.fromtimestamp(token_data.exp) < datetime.now():
            raise HTTPException(
                status_code = status.HTTP_401_UNAUTHORIZED,
                detail="Token expired",
                headers={"WWW-Authenticate": "Bearer"},
            )
    except(jwt.JWTError, ValidationError):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Could not validate credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )
    # crud.user.get returns a User model instance (or None), not a dict.
    user: Union[models.User, None] = crud.user.get(db, id=token_data.id)
    if user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Could not find user",
        )
    return user
async def get_current_active_user(
    current_user: models.User = Security(get_current_user, scopes=[],),
) -> models.User:
    """Pass the authenticated user through, rejecting deactivated accounts."""
    if crud.user.is_active(current_user):
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")

View File

View File

@ -0,0 +1,9 @@
from private_gpt.users.api.v1.routers import auth, roles, user_roles, users
from fastapi import APIRouter
# Aggregate router for the v1 API; only the auth endpoints are wired up so far.
api_router = APIRouter()
api_router.include_router(auth.router)
# api_router.include_router(users.router)
# api_router.include_router(roles.router)
# api_router.include_router(user_roles.router)

View File

@ -0,0 +1,132 @@
from datetime import timedelta, datetime
from typing import Any
from private_gpt.users import crud, models, schemas
from private_gpt.users.api import deps
from private_gpt.users.core import security
from private_gpt.users.constants.role import Role
from private_gpt.users.core.config import settings
from fastapi import APIRouter, Body, Depends, HTTPException
from fastapi.security import OAuth2PasswordRequestForm
from pydantic.networks import EmailStr
from sqlalchemy.orm import Session
# All authentication endpoints live under /auth.
router = APIRouter(prefix="/auth", tags=["auth"])
@router.post("/login", response_model=schemas.TokenSchema)
def login_access_token(
    db: Session = Depends(deps.get_db),
    form_data: OAuth2PasswordRequestForm = Depends(),
) -> Any:
    """
    OAuth2 compatible token login, get an access token for future requests.

    Verifies the credentials, records the login time, and returns a pair
    of JWT access/refresh tokens carrying the user's id and role.
    """
    user = crud.user.authenticate(
        db, email=form_data.username, password=form_data.password
    )
    if not user:
        raise HTTPException(
            status_code=400, detail="Incorrect email or password"
        )
    elif not crud.user.is_active(user):
        raise HTTPException(status_code=400, detail="Inactive user")
    access_token_expires = timedelta(
        minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES
    )
    refresh_token_expires = timedelta(
        minutes=settings.REFRESH_TOKEN_EXPIRE_MINUTES
    )
    # Record the login in UTC for consistency with the User model's
    # created_at/updated_at defaults (datetime.utcnow); was datetime.now().
    user_in = schemas.UserUpdate(
        last_login=datetime.utcnow()
    )
    user = crud.user.update(db, db_obj=user, obj_in=user_in)
    # Fall back to GUEST when no role has been assigned yet.
    if not user.user_role:
        role = "GUEST"
    else:
        role = user.user_role.role.name
    token_payload = {
        "id": str(user.id),
        "role": role,
    }
    return {
        "access_token": security.create_access_token(
            token_payload, expires_delta=access_token_expires
        ),
        "refresh_token": security.create_refresh_token(
            token_payload, expires_delta=refresh_token_expires
        )
    }
@router.post("/register", response_model=schemas.TokenSchema)
def register(
    *,
    db: Session = Depends(deps.get_db),
    email: EmailStr = Body(...),
    password: str = Body(...),
    fullname: str = Body(...),
) -> Any:
    """
    Register new user.

    Creates the user, assigns the default role, and returns a JWT
    access/refresh token pair so the client is logged in immediately.
    Raises 409 when the email is already taken.
    """
    user = crud.user.get_by_email(db, email=email)
    if user:
        raise HTTPException(
            status_code=409,
            detail="The user with this username already exists in the system",
        )
    user_in = schemas.UserCreate(
        email=email,
        password=password,
        fullname=fullname,
    )
    user = crud.user.create(db, obj_in=user_in)

    # NOTE(review): Role.ACCOUNT_ADMIN is referenced here, but the Role
    # constants class only defines GUEST/ADMIN/SUPER_ADMIN — confirm the
    # constant exists (or pick the intended default role for sign-ups).
    role = crud.role.get_by_name(db, name=Role.ACCOUNT_ADMIN["name"])
    if role is None:
        # Without this guard, a missing role row crashes on role.id below.
        raise HTTPException(
            status_code=500,
            detail="Default user role is not configured",
        )
    user_role_in = schemas.UserRoleCreate(
        user_id=user.id,
        role_id=role.id
    )
    user_role = crud.user_role.create(db, obj_in=user_role_in)

    access_token_expires = timedelta(
        minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES
    )
    refresh_token_expires = timedelta(
        minutes=settings.REFRESH_TOKEN_EXPIRE_MINUTES
    )
    # user.user_role may not be refreshed on this instance yet; fall back.
    if not user.user_role:
        role_name = "GUEST"
    else:
        role_name = user.user_role.role.name
    token_payload = {
        "id": str(user.id),
        "role": role_name,
    }
    return {
        "access_token": security.create_access_token(
            token_payload, expires_delta=access_token_expires
        ),
        "refresh_token": security.create_refresh_token(
            token_payload, expires_delta=refresh_token_expires
        )
    }

View File

@ -0,0 +1,18 @@
from typing import Any, List
from private_gpt.users import crud, schemas
from private_gpt.users.api import deps
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
# Read-only endpoints for the role lookup table.
router = APIRouter(prefix='/roles', tags=['roles'])
@router.get("/", response_model= List[schemas.Role])
def get_roles(
    db: Session = Depends(deps.get_db), skip: int = 0, limit: int = 100,
) -> Any:
    """
    Retrieve all available user roles.
    """
    # Fix: the CRUD package exposes this helper as `crud.role`
    # (see crud/__init__.py); `crud.roles_crud` does not exist and raised
    # AttributeError on every request.
    roles = crud.role.get_multi(db, skip=skip, limit=limit)
    return roles

View File

@ -0,0 +1,58 @@
from typing import Any
from private_gpt.users import crud, models, schemas
from private_gpt.users.api import deps
from private_gpt.users.constants.role import Role
from fastapi import APIRouter, Depends, HTTPException, Security
from sqlalchemy.orm import Session
# Endpoints for assigning and changing a user's role.
router = APIRouter(prefix="/user-roles", tags=["user-roles"])
@router.post("", response_model=schemas.UserRole)
def assign_user_role(
    *,
    db: Session = Depends(deps.get_db),
    user_role_in: schemas.UserRoleCreate,
    current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
    """
    Assign a role to a user after creation of a user
    """
    existing = crud.user_role.get_by_user_id(db, user_id=user_role_in.user_id)
    if existing:
        # A user can hold only one role; refuse a second assignment.
        raise HTTPException(
            status_code=409,
            detail="This user has already been assigned a role.",
        )
    return crud.user_role.create(db, obj_in=user_role_in)
@router.put("/{user_id}", response_model=schemas.UserRole)
def update_user_role(
    *,
    db: Session = Depends(deps.get_db),
    user_id: int,
    user_role_in: schemas.UserRoleUpdate,
    # current_user: models.User = Security(
    #     deps.get_current_active_user,
    #     scopes=[
    #         Role.ADMIN["name"],
    #         Role.SUPER_ADMIN["name"],
    #         Role.ACCOUNT_ADMIN["name"],
    #     ],
    # ),
) -> Any:
    """
    Update a users role.
    """
    existing = crud.user_role.get_by_user_id(db, user_id=user_id)
    if existing is None:
        raise HTTPException(
            status_code=404, detail="There is no role assigned to this user",
        )
    return crud.user_role.update(
        db, db_obj=existing, obj_in=user_role_in
    )

View File

@ -0,0 +1,165 @@
from typing import Any, List
from private_gpt.users import crud, models, schemas
from private_gpt.users.api import deps
from private_gpt.users.constants.role import Role
from private_gpt.users.core.config import settings
from fastapi import APIRouter, Body, Depends, HTTPException, Security
from fastapi.encoders import jsonable_encoder
from pydantic.networks import EmailStr
from sqlalchemy.orm import Session
# User management endpoints (listing, self-service profile, admin CRUD).
router = APIRouter(prefix="/users", tags=["users"])
@router.get("", response_model=List[schemas.User])
def read_users(
    db: Session = Depends(deps.get_db),
    skip: int = 0,
    limit: int = 100,
    # current_user: models.User = Security(
    #     deps.get_current_active_user,
    #     scopes=[Role.ADMIN["name"], Role.SUPER_ADMIN["name"]],
    # ),
) -> Any:
    """
    Retrieve all users.
    """
    return crud.user.get_multi(db, skip=skip, limit=limit,)
@router.post("", response_model=schemas.User)
def create_user(
    *,
    db: Session = Depends(deps.get_db),
    user_in: schemas.UserCreate,
    current_user: models.User = Security(
        deps.get_current_active_user,
        scopes=[Role.ADMIN["name"], Role.SUPER_ADMIN["name"]],
    ),
) -> Any:
    """
    Create new user.
    """
    existing = crud.user.get_by_email(db, email=user_in.email)
    if existing is not None:
        # Email doubles as the login name, so it must be unique.
        raise HTTPException(
            status_code=409,
            detail="The user with this username already exists in the system.",
        )
    return crud.user.create(db, obj_in=user_in)
@router.put("/me", response_model=schemas.User)
def update_user_me(
    *,
    db: Session = Depends(deps.get_db),
    fullname: str = Body(None),
    phone_number: str = Body(None),
    email: EmailStr = Body(None),
    current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
    """
    Update own user.

    Only the supplied fields are changed. Changing the email to one that
    already belongs to another account now returns 409 instead of letting
    the database unique-constraint failure surface as a 500.
    """
    current_user_data = jsonable_encoder(current_user)
    user_in = schemas.UserUpdate(**current_user_data)
    if phone_number is not None:
        # NOTE(review): UserUpdate does not declare a phone_number field —
        # confirm the schema supports it, else this assignment will fail.
        user_in.phone_number = phone_number
    if fullname is not None:
        user_in.fullname = fullname
    if email is not None:
        existing = crud.user.get_by_email(db, email=email)
        if existing is not None and existing.id != current_user.id:
            raise HTTPException(
                status_code=409,
                detail="The user with this username already exists in the system.",
            )
        user_in.email = email
    user = crud.user.update(db, db_obj=current_user, obj_in=user_in)
    return user
@router.get("/me", response_model=schemas.User)
def read_user_me(
    db: Session = Depends(deps.get_db),
    current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
    """
    Get current user.
    """
    # Flatten the assigned role (if any) to its name for the response.
    role = current_user.user_role.role.name if current_user.user_role else None
    return schemas.User(
        id=current_user.id,
        email=current_user.email,
        is_active=current_user.is_active,
        fullname=current_user.fullname,
        created_at=current_user.created_at,
        updated_at=current_user.updated_at,
        role=role,
    )
@router.get("/me/change-password", response_model=schemas.User)
def change_password_me(
    db: Session = Depends(deps.get_db),
    current_user: models.User = Depends(deps.get_current_active_user),
    old_password: str = Body(None),
    new_password: str = Body(None),
) -> Any:
    """
    Change the current user's password.

    Fixes two defects in the original handler: it duplicated the name
    ``read_user_me`` (shadowing the /me handler in this module) and it
    ignored ``old_password``/``new_password`` entirely, merely echoing
    the profile without changing anything.
    """
    # NOTE(review): a state-changing endpoint should be POST/PUT; kept as
    # GET here to preserve the existing route contract.
    if not old_password or not new_password:
        raise HTTPException(
            status_code=400,
            detail="Both old and new passwords are required",
        )
    # Re-authenticate with the old password before accepting the change.
    if not crud.user.authenticate(
        db, email=current_user.email, password=old_password
    ):
        raise HTTPException(status_code=400, detail="Incorrect password")
    # crud.user.update hashes the plaintext "password" key before storing.
    user = crud.user.update(
        db, db_obj=current_user, obj_in={"password": new_password}
    )
    return user
@router.get("/{user_id}", response_model=schemas.User)
def read_user_by_id(
    user_id: int,
    current_user: models.User = Security(
        deps.get_current_active_user,
        scopes=[Role.ADMIN["name"], Role.SUPER_ADMIN["name"]],
    ),
    db: Session = Depends(deps.get_db),
) -> Any:
    """
    Get a specific user by id.

    Returns 404 for an unknown id (previously the handler returned None,
    which fails response-model validation and surfaces as a 500).
    """
    user = crud.user.get(db, id=user_id)
    if not user:
        raise HTTPException(
            status_code=404,
            detail="The user with this username does not exist in the system",
        )
    return user
@router.put("/{user_id}", response_model=schemas.User)
def update_user(
    *,
    db: Session = Depends(deps.get_db),
    user_id: int,
    user_in: schemas.UserUpdate,
    current_user: models.User = Security(
        deps.get_current_active_user,
        scopes=[Role.ADMIN["name"], Role.SUPER_ADMIN["name"]],
    ),
) -> Any:
    """
    Update a user.
    """
    target = crud.user.get(db, id=user_id)
    if target is None:
        raise HTTPException(
            status_code=404,
            detail="The user with this username does not exist in the system",
        )
    return crud.user.update(db, db_obj=target, obj_in=user_in)

View File

View File

@ -0,0 +1,17 @@
class Role:
    """
    Constants for the various roles scoped in the application ecosystem
    """
    GUEST = {
        "name": "GUEST",
        "description": "A Guest Account",
    }
    ADMIN = {
        "name": "ADMIN",
        "description": "Admin of Application Ecosystem",
    }
    SUPER_ADMIN = {
        "name": "SUPER_ADMIN",
        "description": "Super Administrator of Application Ecosystem",
    }
    # Added: auth.register and the user-roles router reference
    # Role.ACCOUNT_ADMIN, which previously did not exist and raised
    # AttributeError at request time.
    ACCOUNT_ADMIN = {
        "name": "ACCOUNT_ADMIN",
        "description": "Account Administrator of Application Ecosystem",
    }

View File

View File

@ -0,0 +1,64 @@
from functools import lru_cache
from typing import Any, Dict, Optional
from pydantic import PostgresDsn, validator
from pydantic_settings import BaseSettings
# NOTE(review): hard-coded credentials duplicate the values in .env —
# consider assembling this URL from the Settings fields below and keeping
# secrets out of source control.
SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://{username}:{password}@{host}:{port}/{db_name}".format(
    host='localhost',
    port='5432',
    db_name='QuickGpt',
    username='postgres',
    password="quick",
)
class Settings(BaseSettings):
    """Application configuration loaded from the environment / .env file."""
    PROJECT_NAME: str = "AUTHENTICATION AND AUTHORIZATION"
    API_V1_STR: str = "/api/v1"
    # JWT signing / token lifetime configuration.
    SECRET_KEY: str
    ACCESS_TOKEN_EXPIRE_MINUTES: int
    REFRESH_TOKEN_EXPIRE_MINUTES: int
    ENVIRONMENT: Optional[str]
    # Bootstrap super-admin account credentials.
    SUPER_ADMIN_EMAIL: str
    SUPER_ADMIN_PASSWORD: str
    SUPER_ADMIN_ACCOUNT_NAME: str
    # Database connection pieces (currently unused by the module-level
    # SQLALCHEMY_DATABASE_URI above — see NOTE there).
    DB_HOST: str
    DB_USER: str
    DB_PASSWORD: str
    DB_NAME: str
    PORT: str
    # SQLALCHEMY_DATABASE_URI: Optional[PostgresDsn] = None
    # @validator("SQLALCHEMY_DATABASE_URI", pre=True)
    # def assemble_db_connection(
    #     cls, v: Optional[str], values: Dict[str, Any]
    # ) -> Any:
    #     if isinstance(v, str):
    #         return v
    #     return PostgresDsn.build(
    #         scheme="postgresql",
    #         user=values.get("DB_USER"),
    #         password=values.get("DB_PASS"),
    #         host=values.get("DB_HOST"),
    #         path=f"/{values.get('DB_NAME') or ''}",
    #     )
    # Database url configuration
    class Config:
        # Read variables case-sensitively from the local .env file.
        case_sensitive = True
        env_file = ".env"
@lru_cache()
def get_settings():
    """Return a cached Settings instance (the environment is parsed once)."""
    return Settings()


# Module-level singleton used throughout the app.
settings = get_settings()

View File

@ -0,0 +1,44 @@
from passlib.context import CryptContext
import os
from datetime import datetime, timedelta
from typing import Union, Any
from jose import jwt
ACCESS_TOKEN_EXPIRE_MINUTES = 30  # 30 minutes
REFRESH_TOKEN_EXPIRE_MINUTES = 60 * 24 * 7  # 7 days
ALGORITHM = "HS256"
# JWT_SECRET_KEY = os.environ['JWT_SECRET_KEY']     # should be kept secret
# JWT_REFRESH_SECRET_KEY = os.environ['JWT_REFRESH_SECRET_KEY']      # should be kept secret
# NOTE(review): hard-coded signing keys are a security risk and bypass the
# SECRET_KEY configured in settings/.env — load these from the environment.
JWT_SECRET_KEY = "QUICKGPT"
JWT_REFRESH_SECRET_KEY = "QUICKGPT_REFRESH"
# Shared bcrypt hashing context for password storage and verification.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True if *plain_password* matches the stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)
def get_password_hash(password: str) -> str:
    """Hash *password* with bcrypt for persistent storage."""
    return pwd_context.hash(password)
def create_access_token(subject: Union[str, Any], expires_delta: timedelta = None) -> str:
    """Sign and return a JWT access token.

    :param subject: mapping of claims to embed (e.g. {"id": ..., "role": ...});
        it is spread into the payload, so despite the loose annotation it
        must be a dict.
    :param expires_delta: optional token lifetime; the previous ``int``
        annotation was wrong — callers pass a ``timedelta``. Defaults to
        ACCESS_TOKEN_EXPIRE_MINUTES.
    """
    if expires_delta is not None:
        expire = datetime.utcnow() + expires_delta
    else:
        expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode = {"exp": expire, **subject}
    return jwt.encode(to_encode, JWT_SECRET_KEY, ALGORITHM)
def create_refresh_token(subject: Union[str, Any], expires_delta: timedelta = None) -> str:
    """Sign and return a JWT refresh token (separate signing key).

    :param subject: mapping of claims to embed; spread into the payload.
    :param expires_delta: optional token lifetime; the previous ``int``
        annotation was wrong — callers pass a ``timedelta``. Defaults to
        REFRESH_TOKEN_EXPIRE_MINUTES.
    """
    if expires_delta is not None:
        expire = datetime.utcnow() + expires_delta
    else:
        expire = datetime.utcnow() + timedelta(minutes=REFRESH_TOKEN_EXPIRE_MINUTES)
    to_encode = {"exp": expire, **subject}
    return jwt.encode(to_encode, JWT_REFRESH_SECRET_KEY, ALGORITHM)

View File

@ -0,0 +1,3 @@
from .role_crud import role
from .user_crud import user
from .user_role_crud import user_role

View File

@ -0,0 +1,64 @@
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
from private_gpt.users.db.base import Base
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from sqlalchemy.orm import Session
# Define custom types for SQLAlchemy model, and Pydantic schemas
ModelType = TypeVar("ModelType", bound=Base)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
class CRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
    def __init__(self, model: Type[ModelType]):
        """Base class that can be extend by other action classes.

        Provides basic CRUD and listing operations.

        :param model: The SQLAlchemy model
        :type model: Type[ModelType]
        """
        self.model = model

    def get_multi(
        self, db: Session, *, skip: int = 0, limit: int = 100
    ) -> List[ModelType]:
        """Return up to *limit* rows, skipping the first *skip*."""
        return db.query(self.model).offset(skip).limit(limit).all()

    def get(self, db: Session, id: int) -> Optional[ModelType]:
        """Return the row with primary key *id*, or None when absent."""
        return db.query(self.model).filter(self.model.id == id).first()

    def create(self, db: Session, *, obj_in: CreateSchemaType) -> ModelType:
        """Insert a new row built from the *obj_in* schema and return it."""
        obj_in_data = jsonable_encoder(obj_in)
        db_obj = self.model(**obj_in_data)  # type: ignore
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def update(
        self,
        db: Session,
        *,
        db_obj: ModelType,
        obj_in: Union[UpdateSchemaType, Dict[str, Any]]
    ) -> ModelType:
        """Apply the fields set in *obj_in* (schema or plain dict) to *db_obj*."""
        obj_data = jsonable_encoder(db_obj)
        if isinstance(obj_in, dict):
            update_data = obj_in
        else:
            # Only fields explicitly provided on the schema are applied.
            update_data = obj_in.dict(exclude_unset=True)
        for field in obj_data:
            if field in update_data:
                setattr(db_obj, field, update_data[field])
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def remove(self, db: Session, *, id: int) -> ModelType:
        """Delete and return the row with primary key *id*.

        NOTE(review): ``Query.get`` is legacy in SQLAlchemy 1.4+, and the
        result may be None for an unknown id — confirm callers guard this.
        """
        obj = db.query(self.model).get(id)
        db.delete(obj)
        db.commit()
        return obj

View File

@ -0,0 +1,13 @@
from typing import Optional
from private_gpt.users.crud.base import CRUDBase
from private_gpt.users.models.role import Role
from private_gpt.users.schemas.role import RoleCreate, RoleUpdate
from sqlalchemy.orm import Session
class CRUDRole(CRUDBase[Role, RoleCreate, RoleUpdate]):
    """CRUD operations specific to the Role model."""

    def get_by_name(self, db: Session, *, name: str) -> Optional[Role]:
        """Look a role up by its name; None when no such role exists."""
        query = db.query(self.model).filter(self.model.name == name)
        return query.first()


# Module-level singleton used by the routers.
role = CRUDRole(Role)

View File

@ -0,0 +1,77 @@
from typing import Any, Dict, List, Optional, Union
from private_gpt.users.core.security import get_password_hash, verify_password
from private_gpt.users.crud.base import CRUDBase
from private_gpt.users.models.user import User
from private_gpt.users.schemas.user import UserCreate, UserUpdate
from sqlalchemy.orm import Session
class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]):
    """CRUD operations for User, plus authentication helpers."""

    def get_by_email(self, db: Session, *, email: str) -> Optional[User]:
        """Return the user with this (unique) email, or None."""
        return db.query(self.model).filter(User.email == email).first()

    def create(self, db: Session, *, obj_in: UserCreate) -> User:
        """Create a user, hashing the plaintext password before storage.

        NOTE(review): get_password_hash() returns a str but the model's
        hashed_password column is LargeBinary — confirm the value is
        encoded to bytes (or the column type changed) before this runs
        against a strict backend.
        """
        db_obj = User(
            email=obj_in.email,
            hashed_password=get_password_hash(obj_in.password),
            fullname=obj_in.fullname,
        )
        db.add(db_obj)
        db.commit()
        db.refresh(db_obj)
        return db_obj

    def update(
        self,
        db: Session,
        *,
        db_obj: User,
        obj_in: Union[UserUpdate, Dict[str, Any]],
    ) -> User:
        """Update a user; a plaintext "password" key is re-hashed first."""
        if isinstance(obj_in, dict):
            update_data = obj_in
        else:
            update_data = obj_in.dict(exclude_unset=True)
        if "password" in update_data:
            # Never persist the plaintext password.
            hashed_password = get_password_hash(update_data["password"])
            del update_data["password"]
            update_data["hashed_password"] = hashed_password
        return super().update(db, db_obj=db_obj, obj_in=update_data)

    def get_multi(
        self, db: Session, *, skip: int = 0, limit: int = 100,
    ) -> List[User]:
        """Return a page of users."""
        return db.query(self.model).offset(skip).limit(limit).all()

    def authenticate(
        self, db: Session, *, email: str, password: str
    ) -> Optional[User]:
        """Return the user when email+password check out, else None."""
        user = self.get_by_email(db, email=email)
        if not user:
            return None
        if not verify_password(password, user.hashed_password):
            return None
        return user

    def is_active(self, user: User) -> bool:
        """Whether the account is enabled for login."""
        return user.is_active

    def get_by_account_id(
        self,
        db: Session,
        *,
        account_id: int,
        skip: int = 0,
        limit: int = 100,
    ) -> List[User]:
        """Return users for an account.

        NOTE(review): User.account_id is commented out in the model —
        confirm the column exists before calling this, else it fails.
        """
        return (
            db.query(self.model)
            .filter(User.account_id == account_id)
            .offset(skip)
            .limit(limit)
            .all()
        )


# Module-level singleton used by the routers.
user = CRUDUser(User)

View File

@ -0,0 +1,16 @@
from typing import Optional
from private_gpt.users.crud.base import CRUDBase
from private_gpt.users.models.user_role import UserRole
from private_gpt.users.schemas.user_role import UserRoleCreate, UserRoleUpdate
from sqlalchemy.orm import Session
class CRUDUserRole(CRUDBase[UserRole, UserRoleCreate, UserRoleUpdate]):
    """CRUD operations for user-role assignments."""

    def get_by_user_id(
        self, db: Session, *, user_id: int
    ) -> Optional[UserRole]:
        """Return the assignment row for *user_id*, or None."""
        return (
            db.query(UserRole)
            .filter(UserRole.user_id == user_id)
            .first()
        )


# Module-level singleton used by the routers.
user_role = CRUDUserRole(UserRole)

View File

View File

@ -0,0 +1,4 @@
from private_gpt.users.db.base_class import Base
from private_gpt.users.models.user import User
from private_gpt.users.models.role import Role
from private_gpt.users.models.user_role import UserRole

View File

@ -0,0 +1,15 @@
from typing import Any
import inflect
from sqlalchemy.ext.declarative import as_declarative, declared_attr
# Inflection engine used to pluralise model class names into table names.
p = inflect.engine()


@as_declarative()
class Base:
    """Declarative base: derives __tablename__ from the class name."""
    # Populated on concrete models by SQLAlchemy; declared for typing only.
    id: Any
    __name__: str

    # Generate __tablename__ automatically
    @declared_attr
    def __tablename__(cls) -> str:
        # e.g. class User -> table "users", class Role -> table "roles".
        return p.plural(cls.__name__.lower())

View File

@ -0,0 +1,11 @@
from private_gpt.users.db.session import SessionLocal
from sqlalchemy.orm import Session
def init_db(db: Session) -> None:
    """Database session generator"""
    # NOTE(review): the *db* parameter is immediately shadowed by a fresh
    # session, and despite the `None` annotation this is a generator that
    # yields a session — it duplicates deps.get_db; confirm it is needed.
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

View File

@ -0,0 +1,13 @@
from private_gpt.users.core.config import SQLALCHEMY_DATABASE_URI
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Global engine; echo=True logs every SQL statement (noisy — development only).
engine = create_engine(SQLALCHEMY_DATABASE_URI, echo=True, future=True, pool_pre_ping=True)
# Session factory bound to the engine; one session per request via get_db().
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# test_engine = create_engine(
#     f"{settings.SQLALCHEMY_DATABASE_URI}_test", pool_pre_ping=True
# )
# TestingSessionLocal = sessionmaker(
#     autocommit=False, autoflush=False, bind=test_engine
# )

View File

@ -0,0 +1 @@
from .user import User

View File

@ -0,0 +1,7 @@
from private_gpt.users.db.base_class import Base
from sqlalchemy import Column, String, Text, Integer
class Role(Base):
    """Role lookup table (table name auto-pluralised to "roles" by Base)."""
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(100), index=True)  # e.g. GUEST / ADMIN / SUPER_ADMIN
    description = Column(Text)

View File

@ -0,0 +1,47 @@
import datetime
from sqlalchemy import (
LargeBinary,
Column,
String,
Integer,
Boolean,
UniqueConstraint,
PrimaryKeyConstraint,
DateTime,
ForeignKey
)
from sqlalchemy.orm import relationship
from private_gpt.users.db.base_class import Base
class User(Base):
    """Models a user table"""
    __tablename__ = "users"

    id = Column(Integer, nullable=False, primary_key=True)
    email = Column(String(225), nullable=False, unique=True)
    # NOTE(review): get_password_hash() returns a str, but this column is
    # LargeBinary — confirm the hash is encoded to bytes before storage.
    hashed_password = Column(LargeBinary, nullable=False)
    fullname = Column(String(225), nullable=False)
    # NOTE(review): these two bare constraint objects are discarded unless
    # placed in __table_args__, so they are no-ops; email/id are already
    # unique/primary via the Column definitions above.
    UniqueConstraint("email", name="uq_user_email")
    PrimaryKeyConstraint("id", name="pk_user_id")
    is_active = Column(Boolean, default=False)
    last_login = Column(DateTime, nullable=True)
    created_at = Column(DateTime, default=datetime.datetime.utcnow)
    updated_at = Column(
        DateTime,
        default=datetime.datetime.utcnow,
        onupdate=datetime.datetime.utcnow,
    )
    # account_id = Column(Integer, ForeignKey("accounts.id"), nullable=True)

    # One-to-one link to the user's single role assignment.
    user_role = relationship("UserRole", back_populates="user", uselist=False)
    # account = relationship("Account", back_populates="users")

    def __repr__(self):
        """Returns string representation of model instance"""
        return "<User {fullname!r}>".format(fullname=self.fullname)

View File

@ -0,0 +1,26 @@
from private_gpt.users.db.base_class import Base
from sqlalchemy import Column, ForeignKey, UniqueConstraint, Integer
from sqlalchemy.orm import relationship
class UserRole(Base):
    """Association table linking a user to exactly one role."""
    __tablename__ = "user_roles"

    user_id = Column(
        Integer,
        ForeignKey("users.id"),
        primary_key=True,
        nullable=False,
    )
    role_id = Column(
        Integer,
        ForeignKey("roles.id"),
        primary_key=True,
        nullable=False,
    )
    role = relationship("Role")
    user = relationship("User", back_populates="user_role", uselist=False)

    __table_args__ = (
        UniqueConstraint("user_id", "role_id", name="unique_user_role"),
    )

View File

@ -0,0 +1,4 @@
from .role import Role, RoleCreate, RoleInDB, RoleUpdate
from .token import TokenSchema, TokenPayload
from .user import User, UserCreate, UserInDB, UserUpdate
from .user_role import UserRole, UserRoleCreate, UserRoleInDB, UserRoleUpdate

View File

@ -0,0 +1,37 @@
from typing import Optional
from pydantic import BaseModel
# Shared properties
class RoleBase(BaseModel):
    """Fields common to all role schemas."""
    name: Optional[str]
    description: Optional[str]

    class Config:
        arbitrary_types_allowed = True
# Properties to receive via API on creation
class RoleCreate(RoleBase):
    """Payload for creating a role (no extra fields)."""
    pass
# Properties to receive via API on update
class RoleUpdate(RoleBase):
    """Payload for updating a role (no extra fields)."""
    pass
class RoleInDBBase(RoleBase):
    """Role as stored in the database (adds the primary key)."""
    id: int

    class Config:
        orm_mode = True
# Additional properties to return via API
class Role(RoleInDBBase):
    """Role shape returned by the API."""
    pass
class RoleInDB(RoleInDBBase):
    """Role shape for internal/database use."""
    pass

View File

@ -0,0 +1,16 @@
from pydantic import BaseModel
class TokenSchema(BaseModel):
    """JWT pair returned by the /login and /register endpoints."""
    access_token: str
    refresh_token: str

    class Config:
        arbitrary_types_allowed = True
class TokenPayload(BaseModel):
    """Claims decoded from a JWT (see security.create_access_token)."""
    id: int
    role: str = None
    # "exp" is written into every token by security.create_*_token and read
    # as token_data.exp in deps.get_current_user; without declaring it here
    # the extra claim is dropped and that attribute access fails.
    exp: int = None

    class Config:
        arbitrary_types_allowed = True

View File

@ -0,0 +1,50 @@
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field, EmailStr
from private_gpt.users.models.user_role import UserRole
class UserBaseSchema(BaseModel):
    """Fields common to all user schemas."""
    email: EmailStr
    fullname: str

    class Config:
        arbitrary_types_allowed = True
class UserCreate(UserBaseSchema):
    """Registration payload: base fields plus a plaintext password."""
    password: str = Field(alias="password")
class UserUpdate(UserBaseSchema):
    """Partial-update payload; every field is optional.

    The inherited email/fullname were previously required, so the login
    endpoint's ``UserUpdate(last_login=...)`` call failed validation on
    every login. Unset fields are skipped by CRUDBase.update via
    ``exclude_unset=True``, so making them optional is backward-compatible.
    """
    email: Optional[EmailStr] = None
    fullname: Optional[str] = None
    last_login: Optional[datetime] = None
class UserLoginSchema(BaseModel):
    """Credential payload for password login."""
    email: EmailStr = Field(alias="email")
    password: str

    class Config:
        arbitrary_types_allowed = True
class UserSchema(UserBaseSchema):
    """Full user record as exposed to the API layer."""
    id: int
    user_role: Optional[UserRole]
    last_login: Optional[datetime]
    created_at: datetime
    updated_at: datetime
    is_active: bool = Field(default=False)

    class Config:
        orm_mode = True
        # NOTE(review): "json_exclude" is not a recognised pydantic Config
        # key — confirm the intent (likely Field(exclude=True) on user_role).
        json_exclude = {'user_role'}
# Additional properties to return via API
class User(UserSchema):
    """User shape returned by the API."""
    pass
# Additional properties stored in DB
class UserInDB(UserSchema):
    """User shape including the stored password hash (never returned)."""
    hashed_password: str

View File

@ -0,0 +1,41 @@
from typing import Optional
from private_gpt.users.models.role import Role
from pydantic import BaseModel
# Shared properties
class UserRoleBase(BaseModel):
    """Fields common to all user-role schemas."""
    user_id: Optional[int]
    role_id: Optional[int]

    class Config:
        arbitrary_types_allowed = True
# Properties to receive via API on creation
class UserRoleCreate(UserRoleBase):
    """Payload for assigning a role to a user."""
    pass
# Properties to receive via API on update
class UserRoleUpdate(BaseModel):
    """Payload for changing an existing assignment (role only)."""
    role_id: int

    class Config:
        arbitrary_types_allowed = True
class UserRoleInDBBase(UserRoleBase):
    """Assignment as stored, with the related Role eagerly embedded."""
    role: Role

    class Config:
        orm_mode = True
        arbitrary_types_allowed = True
# Additional properties to return via API
class UserRole(UserRoleInDBBase):
    """User-role shape returned by the API."""
    pass
class UserRoleInDB(UserRoleInDBBase):
    """User-role shape for internal/database use."""
    pass