imartinez 2024-02-29 15:20:26 +01:00
parent 34d48d7b4d
commit 63d3b9f936
7 changed files with 39 additions and 23 deletions

View File

@@ -20,7 +20,9 @@ class EmbeddingComponent:
         match embedding_mode:
             case "local":
                 try:
-                    from llama_index.embeddings.huggingface import HuggingFaceEmbedding
+                    from llama_index.embeddings.huggingface import (  # type: ignore
+                        HuggingFaceEmbedding,
+                    )
                 except ImportError as e:
                     raise ImportError(
                         "Local dependencies not found, install with `poetry install --extras local`"
@@ -45,7 +47,9 @@ class EmbeddingComponent:
                 )
             case "openai":
                 try:
-                    from llama_index.embeddings.openai import OpenAIEmbedding
+                    from llama_index.embeddings.openai import (  # type: ignore
+                        OpenAIEmbedding,
+                    )
                 except ImportError as e:
                     raise ImportError(
                         "OpenAI dependencies not found, install with `poetry install --extras openai`"

View File

@@ -12,15 +12,21 @@ logger = logging.getLogger(__name__)
 # Inspired by the `llama_index.core.readers.file.base` module
 def _try_loading_included_file_formats() -> dict[str, type[BaseReader]]:
     try:
-        from llama_index.readers.file.docs import DocxReader, HWPReader, PDFReader
-        from llama_index.readers.file.epub import EpubReader
-        from llama_index.readers.file.image import ImageReader
-        from llama_index.readers.file.ipynb import IPYNBReader
-        from llama_index.readers.file.markdown import MarkdownReader
-        from llama_index.readers.file.mbox import MboxReader
-        from llama_index.readers.file.slides import PptxReader
-        from llama_index.readers.file.tabular import PandasCSVReader
-        from llama_index.readers.file.video_audio import VideoAudioReader
+        from llama_index.readers.file.docs import (  # type: ignore
+            DocxReader,
+            HWPReader,
+            PDFReader,
+        )
+        from llama_index.readers.file.epub import EpubReader  # type: ignore
+        from llama_index.readers.file.image import ImageReader  # type: ignore
+        from llama_index.readers.file.ipynb import IPYNBReader  # type: ignore
+        from llama_index.readers.file.markdown import MarkdownReader  # type: ignore
+        from llama_index.readers.file.mbox import MboxReader  # type: ignore
+        from llama_index.readers.file.slides import PptxReader  # type: ignore
+        from llama_index.readers.file.tabular import PandasCSVReader  # type: ignore
+        from llama_index.readers.file.video_audio import (  # type: ignore
+            VideoAudioReader,
+        )
     except ImportError as e:
         raise ImportError("`llama-index-readers-file` package not found") from e
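`_try_loading_included_file_formats` only guards the reader imports; the dict it returns, typed `dict[str, type[BaseReader]]`, presumably maps file extensions to these reader classes so callers can pick a reader per file. A hedged sketch of how such a mapping might be consumed (the `read_file` helper and the lower-cased-suffix convention are assumptions for illustration, not code from the commit):

# Illustrative consumer of the extension -> reader-class mapping that
# _try_loading_included_file_formats() presumably returns.
from pathlib import Path
from typing import Any

from llama_index.core.readers.base import BaseReader


def read_file(path: Path, readers: dict[str, type[BaseReader]]) -> list[Any]:
    """Instantiate the reader registered for the file's extension and load it."""
    reader_cls = readers.get(path.suffix.lower())
    if reader_cls is None:
        raise ValueError(f"No reader registered for '{path.suffix}' files")
    return reader_cls().load_data(path)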

View File

@@ -32,7 +32,7 @@ class LLMComponent:
         match settings.llm.mode:
             case "local":
                 try:
-                    from llama_index.llms.llama_cpp import LlamaCPP
+                    from llama_index.llms.llama_cpp import LlamaCPP  # type: ignore
                 except ImportError as e:
                     raise ImportError(
                         "Local dependencies not found, install with `poetry install --extras local`"
@@ -70,7 +70,7 @@ class LLMComponent:
                 )
             case "openai":
                 try:
-                    from llama_index.llms.openai import OpenAI
+                    from llama_index.llms.openai import OpenAI  # type: ignore
                 except ImportError as e:
                     raise ImportError(
                         "OpenAI dependencies not found, install with `poetry install --extras openai`"
@@ -84,7 +84,7 @@ class LLMComponent:
                 )
             case "openailike":
                 try:
-                    from llama_index.llms.openai_like import OpenAILike
+                    from llama_index.llms.openai_like import OpenAILike  # type: ignore
                 except ImportError as e:
                     raise ImportError(
                         "OpenAILike dependencies not found, install with `poetry install --extras openailike`"
@@ -101,7 +101,7 @@ class LLMComponent:
                 )
             case "ollama":
                 try:
-                    from llama_index.llms.ollama import Ollama
+                    from llama_index.llms.ollama import Ollama  # type: ignore
                 except ImportError as e:
                     raise ImportError(
                         "Ollama dependencies not found, install with `poetry install --extras ollama`"

View File

@@ -3,7 +3,7 @@ from typing import Any
 from llama_index.core.schema import BaseNode, MetadataMode
 from llama_index.core.vector_stores.utils import node_to_metadata_dict
-from llama_index.vector_stores.chroma import ChromaVectorStore
+from llama_index.vector_stores.chroma import ChromaVectorStore  # type: ignore
 def chunk_list(
@@ -22,7 +22,7 @@ def chunk_list(
     yield lst[i : i + max_chunk_size]
-class BatchedChromaVectorStore(ChromaVectorStore):
+class BatchedChromaVectorStore(ChromaVectorStore):  # type: ignore
     """Chroma vector store, batching additions to avoid reaching the max batch limit.
     In this vector store, embeddings are stored within a ChromaDB collection.
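The `chunk_list` generator that `BatchedChromaVectorStore` builds on is only partially visible in the hunks (its signature and the `yield` line). A self-contained sketch that completes the loop around the visible `yield`, using `Any` in place of the node type so it runs on its own:

from collections.abc import Generator
from typing import Any


def chunk_list(
    lst: list[Any], max_chunk_size: int
) -> Generator[list[Any], None, None]:
    """Yield consecutive slices of lst, each at most max_chunk_size items long."""
    for i in range(0, len(lst), max_chunk_size):
        yield lst[i : i + max_chunk_size]


# Ten items in batches of four -> [0, 1, 2, 3], [4, 5, 6, 7], [8, 9]
print(list(chunk_list(list(range(10)), 4)))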

View File

@@ -40,7 +40,9 @@ class VectorStoreComponent:
         match settings.vectorstore.database:
             case "pgvector":
                 try:
-                    from llama_index.vector_stores.postgres import PGVectorStore
+                    from llama_index.vector_stores.postgres import (  # type: ignore
+                        PGVectorStore,
+                    )
                 except ImportError as e:
                     raise ImportError(
                         "Postgres dependencies not found, install with `poetry install --extras postgres`"
@@ -91,7 +93,9 @@ class VectorStoreComponent:
             case "qdrant":
                 try:
-                    from llama_index.vector_stores.qdrant import QdrantVectorStore
+                    from llama_index.vector_stores.qdrant import (  # type: ignore
+                        QdrantVectorStore,
+                    )
                     from qdrant_client import QdrantClient
                 except ImportError as e:
                     raise ImportError(

View File

@@ -3,10 +3,9 @@ from pathlib import Path
 from typing import Any
 from watchdog.events import (
-    DirCreatedEvent,
-    DirModifiedEvent,
     FileCreatedEvent,
     FileModifiedEvent,
+    FileSystemEvent,
     FileSystemEventHandler,
 )
 from watchdog.observers import Observer
@@ -20,11 +19,11 @@ class IngestWatcher:
         self.on_file_changed = on_file_changed
         class Handler(FileSystemEventHandler):
-            def on_modified(self, event: DirModifiedEvent | FileModifiedEvent) -> None:
+            def on_modified(self, event: FileSystemEvent) -> None:
                 if isinstance(event, FileModifiedEvent):
                     on_file_changed(Path(event.src_path))
-            def on_created(self, event: DirCreatedEvent | FileCreatedEvent) -> None:
+            def on_created(self, event: FileSystemEvent) -> None:
                 if isinstance(event, FileCreatedEvent):
                     on_file_changed(Path(event.src_path))
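Widening the handler signatures to `FileSystemEvent` matches the parameter type declared on `FileSystemEventHandler`, which keeps mypy's method-override check happy, while the existing `isinstance` guards still narrow to the concrete event before invoking the callback. A minimal runnable sketch of the same pattern outside `IngestWatcher` (the watched path and the print callback are illustrative):

from pathlib import Path

from watchdog.events import FileModifiedEvent, FileSystemEvent, FileSystemEventHandler
from watchdog.observers import Observer


class PrintChangesHandler(FileSystemEventHandler):
    # Accept the broad base-class event type, then narrow with isinstance().
    def on_modified(self, event: FileSystemEvent) -> None:
        if isinstance(event, FileModifiedEvent):
            print(f"changed: {Path(event.src_path)}")


if __name__ == "__main__":
    observer = Observer()
    observer.schedule(PrintChangesHandler(), path=".", recursive=True)
    observer.start()
    try:
        observer.join()
    except KeyboardInterrupt:
        observer.stop()
        observer.join()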

View File

@@ -145,6 +145,9 @@ explicit_package_bases = true
 warn_unused_ignores = false
 exclude = ["tests"]
+[tool.mypy-llama-index]
+ignore_missing_imports = true
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
 testpaths = ["tests"]