mirror of https://github.com/imartinez/privateGPT.git (synced 2025-09-23 03:57:13 +00:00)
Update poetry lock (#1209)
* Update the version of llama_index used to fix transient openai errors
* Update poetry.lock file
* Make `local` mode the default mode
@@ -44,12 +44,12 @@ class BatchedChromaVectorStore(ChromaVectorStore):
         )
         self.chroma_client = chroma_client
 
-    def add(self, nodes: list[BaseNode]) -> list[str]:
+    def add(self, nodes: list[BaseNode], **add_kwargs: Any) -> list[str]:
         """Add nodes to index, batching the insertion to avoid issues.
 
         Args:
             nodes: List[BaseNode]: list of nodes with embeddings
+            add_kwargs: _
         """
         if not self.chroma_client:
             raise ValueError("Client not initialized")
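The hunk above only touches the signature of `add`; the docstring already describes the method's purpose as batching the insertion to avoid issues, presumably to stay under Chroma's maximum batch size. As a rough illustration of that idea, here is a minimal sketch, not the project's actual implementation: the `MAX_CHUNK_SIZE` constant, the import paths, and the reliance on the `chroma_client` attribute set in the `__init__` visible in the diff are all assumptions.

```python
from typing import Any

from llama_index.schema import BaseNode
from llama_index.vector_stores import ChromaVectorStore

# Assumed ceiling; the real limit would come from the Chroma client or config.
MAX_CHUNK_SIZE = 1000


class BatchedChromaVectorStore(ChromaVectorStore):
    # Sketch only: assumes the __init__ shown in the diff, which stores
    # `self.chroma_client`, is also present.
    def add(self, nodes: list[BaseNode], **add_kwargs: Any) -> list[str]:
        """Add nodes to the index in fixed-size batches."""
        if not self.chroma_client:
            raise ValueError("Client not initialized")
        ids: list[str] = []
        # Slice the node list so no single insert exceeds the assumed batch ceiling.
        for start in range(0, len(nodes), MAX_CHUNK_SIZE):
            batch = nodes[start : start + MAX_CHUNK_SIZE]
            ids.extend(super().add(batch, **add_kwargs))
        return ids
```

The next hunk adjusts how this store is assigned inside `VectorStoreComponent`.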
@@ -46,8 +46,11 @@ class VectorStoreComponent:
                 "make_this_parameterizable_per_api_call"
             )  # TODO
 
-            self.vector_store = BatchedChromaVectorStore(
-                chroma_client=chroma_client, chroma_collection=chroma_collection
+            self.vector_store = typing.cast(
+                VectorStore,
+                BatchedChromaVectorStore(
+                    chroma_client=chroma_client, chroma_collection=chroma_collection
+                ),
             )
 
     @staticmethod
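The second hunk wraps the concrete store in `typing.cast(VectorStore, ...)`. `typing.cast` has no runtime effect: it returns its second argument unchanged and only tells a static type checker to treat the value as the declared `VectorStore` type. A minimal, self-contained sketch of that pattern is below; the stand-in class names are hypothetical and not taken from the project.

```python
import typing


class VectorStoreLike:
    """Hypothetical stand-in for llama_index's VectorStore interface."""

    def add(self, texts: list[str]) -> list[str]:
        raise NotImplementedError


class BatchedStoreStandIn(VectorStoreLike):
    """Hypothetical stand-in for BatchedChromaVectorStore."""

    def add(self, texts: list[str]) -> list[str]:
        return [f"id-{i}" for i in range(len(texts))]


class Component:
    # The attribute is declared against the interface type...
    vector_store: VectorStoreLike

    def __init__(self) -> None:
        # ...so the concrete instance is cast to it. typing.cast is a no-op at
        # runtime and only guides the static type checker, which is what the
        # diff does with typing.cast(VectorStore, BatchedChromaVectorStore(...)).
        self.vector_store = typing.cast(VectorStoreLike, BatchedStoreStandIn())


if __name__ == "__main__":
    print(Component().vector_store.add(["hello", "world"]))  # ['id-0', 'id-1']
```

Keeping the attribute typed as the broader interface lets the component hold the concrete batched implementation without changing its declared type.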