# Mirror of https://github.com/imartinez/privateGPT.git
# Synced 2025-04-27 11:21:34 +00:00
# Last change: extract optional dependencies; separate local mode into
# llms-llama-cpp and embeddings-huggingface; support Ollama embeddings;
# upgrade to llamaindex 0.10.14 (remove legacy ServiceContext usage in
# ContextChatEngine); fix vector retriever filters.
[tool.poetry]
name = "private-gpt"
version = "0.4.0"
description = "Private GPT"
authors = ["Zylon <hi@zylon.ai>"]

[tool.poetry.dependencies]
python = ">=3.11,<3.12"
# PrivateGPT
fastapi = { extras = ["all"], version = "^0.110.0" }
python-multipart = "^0.0.9"
injector = "^0.21.0"
pyyaml = "^6.0.1"
watchdog = "^4.0.0"
transformers = "^4.38.2"
# LlamaIndex core libs
llama-index-core = "^0.10.14"
llama-index-readers-file = "^0.1.6"
# Optional LlamaIndex integration libs (enabled via the extras below)
llama-index-llms-llama-cpp = { version = "^0.1.3", optional = true }
llama-index-llms-openai = { version = "^0.1.6", optional = true }
llama-index-llms-openai-like = { version = "^0.1.3", optional = true }
llama-index-llms-ollama = { version = "^0.1.2", optional = true }
llama-index-embeddings-ollama = { version = "^0.1.2", optional = true }
llama-index-embeddings-huggingface = { version = "^0.1.4", optional = true }
llama-index-embeddings-openai = { version = "^0.1.6", optional = true }
llama-index-vector-stores-qdrant = { version = "^0.1.3", optional = true }
llama-index-vector-stores-chroma = { version = "^0.1.4", optional = true }
llama-index-vector-stores-postgres = { version = "^0.1.2", optional = true }
# Optional Sagemaker dependency
boto3 = { version = "^1.34.51", optional = true }
# Optional UI
gradio = { version = "^4.19.2", optional = true }

[tool.poetry.extras]
ui = ["gradio"]
llms-llama-cpp = ["llama-index-llms-llama-cpp"]
llms-openai = ["llama-index-llms-openai"]
llms-openai-like = ["llama-index-llms-openai-like"]
llms-ollama = ["llama-index-llms-ollama"]
llms-sagemaker = ["boto3"]
embeddings-ollama = ["llama-index-embeddings-ollama"]
embeddings-huggingface = ["llama-index-embeddings-huggingface"]
embeddings-openai = ["llama-index-embeddings-openai"]
embeddings-sagemaker = ["boto3"]
vector-stores-qdrant = ["llama-index-vector-stores-qdrant"]
vector-stores-chroma = ["llama-index-vector-stores-chroma"]
vector-stores-postgres = ["llama-index-vector-stores-postgres"]

[tool.poetry.group.dev.dependencies]
black = "^22"
mypy = "^1.2"
pre-commit = "^2"
pytest = "^7"
pytest-cov = "^3"
ruff = "^0"
pytest-asyncio = "^0.21.1"
types-pyyaml = "^6.0.12.12"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

# Packages configs

## coverage

[tool.coverage.run]
branch = true

[tool.coverage.report]
skip_empty = true
precision = 2

## black

[tool.black]
target-version = ['py311']

## ruff
# Recommended ruff config for now, to be updated as we go along.
[tool.ruff]
target-version = 'py311'

# See all rules at https://beta.ruff.rs/docs/rules/
select = [
    "E", # pycodestyle
    "W", # pycodestyle
    "F", # Pyflakes
    "B", # flake8-bugbear
    "C4", # flake8-comprehensions
    "D", # pydocstyle
    "I", # isort
    "SIM", # flake8-simplify
    "TCH", # flake8-type-checking
    "TID", # flake8-tidy-imports
    "Q", # flake8-quotes
    "UP", # pyupgrade
    "PT", # flake8-pytest-style
    "RUF", # Ruff-specific rules
]

ignore = [
    "E501", # "Line too long"
    # -> line length already regulated by black
    "PT011", # "pytest.raises() should specify expected exception"
    # -> would imply to update tests every time you update exception message
    "SIM102", # "Use a single `if` statement instead of nested `if` statements"
    # -> too restrictive,
    "D100",
    "D101",
    "D102",
    "D103",
    "D104",
    "D105",
    "D106",
    "D107",
    # -> "Missing docstring in public function too restrictive"
]

[tool.ruff.pydocstyle]
# Automatically disable rules that are incompatible with Google docstring convention
convention = "google"

[tool.ruff.pycodestyle]
max-doc-length = 88

[tool.ruff.flake8-tidy-imports]
ban-relative-imports = "all"

[tool.ruff.flake8-type-checking]
strict = true
runtime-evaluated-base-classes = ["pydantic.BaseModel"]
# Pydantic needs to be able to evaluate types at runtime
# see https://pypi.org/project/flake8-type-checking/ for flake8-type-checking documentation
# see https://beta.ruff.rs/docs/settings/#flake8-type-checking-runtime-evaluated-base-classes for ruff documentation

[tool.ruff.per-file-ignores]
# Allow missing docstrings for tests
"tests/**/*.py" = ["D1"]

## mypy

[tool.mypy]
python_version = "3.11"
strict = true
check_untyped_defs = false
explicit_package_bases = true
warn_unused_ignores = false
exclude = ["tests"]

# mypy does not read arbitrary [tool.mypy-<pkg>] tables from pyproject.toml;
# per-module settings must go through [[tool.mypy.overrides]] with a `module`
# key, otherwise they are silently ignored.
[[tool.mypy.overrides]]
module = "llama_index.*"
ignore_missing_imports = true

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
addopts = [
    "--import-mode=importlib",
]