Add a batch_size param to the add_texts API of pinecone wrapper (#1658)
A safe default batch_size is needed when calling the Pinecone Python client; otherwise, if a user of add_texts passes too many documents in a single call, Pinecone returns a 400 error.
@@ -56,6 +56,7 @@ class Pinecone(VectorStore):
         metadatas: Optional[List[dict]] = None,
         ids: Optional[List[str]] = None,
         namespace: Optional[str] = None,
+        batch_size: int = 32,
         **kwargs: Any,
     ) -> List[str]:
         """Run more texts through the embeddings and add to the vectorstore.
@@ -79,7 +80,7 @@ class Pinecone(VectorStore):
             metadata[self._text_key] = text
             docs.append((ids[i], embedding, metadata))
         # upsert to Pinecone
-        self._index.upsert(vectors=docs, namespace=namespace)
+        self._index.upsert(vectors=docs, namespace=namespace, batch_size=batch_size)
         return ids

     def similarity_search_with_score(
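For context, a minimal usage sketch (not part of this commit) of how a caller might pass batch_size to add_texts. The API key, environment, index name, and embedding model below are placeholders, and the construction path may differ between LangChain versions:

import pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Pinecone

# Hypothetical setup: API key, environment, and index name are placeholders.
pinecone.init(api_key="YOUR_API_KEY", environment="us-east1-gcp")
index = pinecone.Index("my-index")

# Wrap the raw Pinecone index; "text" is the metadata key used to store the raw text.
vectorstore = Pinecone(index, OpenAIEmbeddings().embed_query, text_key="text")

texts = [f"document {i}" for i in range(1000)]
# batch_size chunks the upsert into requests of at most 64 vectors each;
# it defaults to 32 after this change, keeping large calls under Pinecone's request limit.
ids = vectorstore.add_texts(texts, batch_size=64)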