Mirror of https://github.com/csunny/DB-GPT.git, synced 2025-09-07 12:00:46 +00:00
fix(VectorStore): fix task concurrency and batch processing issue (#2671)
- Refactor task execution to improve concurrency control
- Implement batch processing for vector deletion in the Chroma store

# Description

1. When chunking a knowledge base file that produces a very large number of chunks, launching every load task at once with `asyncio.gather(*tasks)` can peg the CPU and freeze processing.
2. When using the Chroma vector store, deleting more than 5461 embedding document ids in a single call fails.

# How Has This Been Tested?

# Snapshots:

Include snapshots for easier review.

# Checklist:

- [x] My code follows the style guidelines of this project
- [x] I have already rebased the commits and made the commit messages conform to the project standard.
- [x] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [ ] Any dependent changes have been merged and published in downstream modules
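The Chroma side of the fix is not shown in the diff below. As a point of reference, here is a minimal sketch of the batching idea, assuming a raw `chromadb` collection handle; the helper name, constant, and exact limit used in DB-GPT's `ChromaStore` may differ:

```python
# Illustrative sketch only (not the DB-GPT implementation): delete embedding
# document ids from a Chroma collection in batches, because a single delete
# call fails once the id list grows past Chroma's max batch size (~5461).
CHROMA_MAX_BATCH_SIZE = 5461  # assumed limit, taken from the issue description


def delete_ids_in_batches(collection, ids, batch_size=CHROMA_MAX_BATCH_SIZE):
    """Delete ids from a chromadb collection in chunks of `batch_size`."""
    for i in range(0, len(ids), batch_size):
        # chromadb's Collection.delete accepts a list of ids
        collection.delete(ids=ids[i : i + batch_size])
```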
@@ -1,5 +1,6 @@
 """Index store base class."""
+import asyncio
 import logging
 import time
 from abc import ABC, abstractmethod
@@ -182,9 +183,7 @@ class IndexStoreBase(ABC):
         for chunk_group in chunk_groups:
             tasks.append(self.aload_document(chunk_group))

-        import asyncio
-
-        results = await asyncio.gather(*tasks)
+        results = await self._run_tasks_with_concurrency(tasks, max_threads)

         ids = []
         loaded_cnt = 0
@@ -195,6 +194,14 @@ class IndexStoreBase(ABC):

         return ids

+    async def _run_tasks_with_concurrency(self, tasks, max_concurrent):
+        results = []
+        for i in range(0, len(tasks), max_concurrent):
+            batch = tasks[i : i + max_concurrent]
+            batch_results = await asyncio.gather(*batch, return_exceptions=True)
+            results.extend(batch_results)
+        return results
+
     def similar_search(
         self, text: str, topk: int, filters: Optional[MetadataFilters] = None
     ) -> List[Chunk]:
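For context on the first change, below is a self-contained illustration of the batched `asyncio.gather` pattern that the new `_run_tasks_with_concurrency` helper applies; `load_chunk_group` is a stand-in for `aload_document`, not a real DB-GPT function:

```python
import asyncio


async def load_chunk_group(group_id: int) -> str:
    # Stand-in for aload_document: simulate embedding/loading one chunk group.
    await asyncio.sleep(0.1)
    return f"group-{group_id} loaded"


async def run_tasks_with_concurrency(tasks, max_concurrent):
    # Await the coroutines in windows of `max_concurrent` instead of all at
    # once, so a very large chunk list cannot overwhelm the event loop.
    results = []
    for i in range(0, len(tasks), max_concurrent):
        batch = tasks[i : i + max_concurrent]
        results.extend(await asyncio.gather(*batch, return_exceptions=True))
    return results


async def main():
    tasks = [load_chunk_group(i) for i in range(10)]
    print(await run_tasks_with_concurrency(tasks, max_concurrent=3))


asyncio.run(main())
```

With `return_exceptions=True`, a failure in one chunk group is returned as an exception object in the result list instead of aborting the remaining batches.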