[tool.poetry]
name = "private-gpt"
version = "0.6.2"
description = "Private GPT"
authors = ["Zylon"]

[tool.poetry.dependencies]
python = ">=3.11,<3.12"
# PrivateGPT
fastapi = { extras = ["all"], version = "^0.115.0" }
python-multipart = "^0.0.10"
injector = "^0.22.0"
pyyaml = "^6.0.2"
watchdog = "^4.0.1"
transformers = "^4.44.2"
docx2txt = "^0.8"
cryptography = "^3.1"
# LlamaIndex core libs
llama-index-core = ">=0.11.2,<0.12.0"
llama-index-readers-file = "*"
# Optional LlamaIndex integration libs
llama-index-llms-llama-cpp = { version = "*", optional = true }
llama-index-llms-openai = { version = "*", optional = true }
llama-index-llms-openai-like = { version = "*", optional = true }
llama-index-llms-ollama = { version = "*", optional = true }
llama-index-llms-azure-openai = { version = "*", optional = true }
llama-index-llms-gemini = { version = "*", optional = true }
llama-index-embeddings-ollama = { version = "*", optional = true }
llama-index-embeddings-huggingface = { version = "*", optional = true }
llama-index-embeddings-openai = { version = "*", optional = true }
llama-index-embeddings-azure-openai = { version = "*", optional = true }
llama-index-embeddings-gemini = { version = "*", optional = true }
llama-index-embeddings-mistralai = { version = "*", optional = true }
llama-index-vector-stores-qdrant = { version = "*", optional = true }
llama-index-vector-stores-milvus = { version = "*", optional = true }
llama-index-vector-stores-chroma = { version = "*", optional = true }
llama-index-vector-stores-postgres = { version = "*", optional = true }
llama-index-vector-stores-clickhouse = { version = "*", optional = true }
llama-index-storage-docstore-postgres = { version = "*", optional = true }
llama-index-storage-index-store-postgres = { version = "*", optional = true }
# Postgres
psycopg2-binary = { version = "^2.9.9", optional = true }
asyncpg = { version = "^0.29.0", optional = true }
# ClickHouse
clickhouse-connect = { version = "^0.7.19", optional = true }
# Optional SageMaker dependency
boto3 = { version = "^1.35.26", optional = true }
# Optional Reranker dependencies
torch = { version = "^2.4.1", optional = true }
sentence-transformers = { version = "^3.1.1", optional = true }
# Optional UI
gradio = { version = "^4.44.0", optional = true }
ffmpy = { version = "^0.4.0", optional = true }
# Optional HF Transformers
einops = { version = "^0.8.0", optional = true }
retry-async = "^0.1.4"

[tool.poetry.extras]
ui = ["gradio", "ffmpy"]
llms-llama-cpp = ["llama-index-llms-llama-cpp"]
llms-openai = ["llama-index-llms-openai"]
llms-openai-like = ["llama-index-llms-openai-like"]
llms-ollama = ["llama-index-llms-ollama"]
llms-sagemaker = ["boto3"]
llms-azopenai = ["llama-index-llms-azure-openai"]
llms-gemini = ["llama-index-llms-gemini"]
embeddings-ollama = ["llama-index-embeddings-ollama"]
embeddings-huggingface = ["llama-index-embeddings-huggingface", "einops"]
embeddings-openai = ["llama-index-embeddings-openai"]
embeddings-sagemaker = ["boto3"]
embeddings-azopenai = ["llama-index-embeddings-azure-openai"]
embeddings-gemini = ["llama-index-embeddings-gemini"]
embeddings-mistral = ["llama-index-embeddings-mistralai"]
vector-stores-qdrant = ["llama-index-vector-stores-qdrant"]
vector-stores-clickhouse = ["llama-index-vector-stores-clickhouse", "clickhouse-connect"]
vector-stores-chroma = ["llama-index-vector-stores-chroma"]
vector-stores-postgres = ["llama-index-vector-stores-postgres"]
vector-stores-milvus = ["llama-index-vector-stores-milvus"]
["llama-index-storage-docstore-postgres","llama-index-storage-index-store-postgres","psycopg2-binary","asyncpg"] rerank-sentence-transformers = ["torch", "sentence-transformers"] [tool.poetry.group.dev.dependencies] black = "^24" mypy = "^1.11" pre-commit = "^3" pytest = "^8" pytest-cov = "^5" ruff = "^0" pytest-asyncio = "^0.24.0" types-pyyaml = "^6.0.12.20240917" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" # Packages configs ## coverage [tool.coverage.run] branch = true [tool.coverage.report] skip_empty = true precision = 2 ## black [tool.black] target-version = ['py311'] ## ruff # Recommended ruff config for now, to be updated as we go along. [tool.ruff] target-version = 'py311' # See all rules at https://beta.ruff.rs/docs/rules/ lint.select = [ "E", # pycodestyle "W", # pycodestyle "F", # Pyflakes "B", # flake8-bugbear "C4", # flake8-comprehensions "D", # pydocstyle "I", # isort "SIM", # flake8-simplify "TCH", # flake8-type-checking "TID", # flake8-tidy-imports "Q", # flake8-quotes "UP", # pyupgrade "PT", # flake8-pytest-style "RUF", # Ruff-specific rules ] lint.ignore = [ "E501", # "Line too long" # -> line length already regulated by black "PT011", # "pytest.raises() should specify expected exception" # -> would imply to update tests every time you update exception message "SIM102", # "Use a single `if` statement instead of nested `if` statements" # -> too restrictive, "D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107" # -> "Missing docstring in public function too restrictive" ] [tool.ruff.lint.pydocstyle] # Automatically disable rules that are incompatible with Google docstring convention convention = "google" [tool.ruff.lint.pycodestyle] max-doc-length = 88 [tool.ruff.lint.flake8-tidy-imports] ban-relative-imports = "all" [tool.ruff.lint.flake8-type-checking] strict = true runtime-evaluated-base-classes = ["pydantic.BaseModel"] # Pydantic needs to be able to evaluate types at runtime # see https://pypi.org/project/flake8-type-checking/ for flake8-type-checking documentation # see https://beta.ruff.rs/docs/settings/#flake8-type-checking-runtime-evaluated-base-classes for ruff documentation [tool.ruff.lint.per-file-ignores] # Allow missing docstrings for tests "tests/**/*.py" = ["D1"] ## mypy [tool.mypy] python_version = "3.11" strict = true check_untyped_defs = false explicit_package_bases = true warn_unused_ignores = false exclude = ["tests"] [tool.mypy-llama-index] ignore_missing_imports = true [tool.pytest.ini_options] asyncio_mode = "auto" testpaths = ["tests"] addopts = [ "--import-mode=importlib", ]