Mirror of https://github.com/hwchase17/langchain.git

docstrings: vectorstores consistency (#9349)
- updated the top-level docstrings to a consistent format;
- changed several `ValueError` exceptions to `ImportError` in the failed-import cases (sketched below);
- renamed several internal functions from "name" to "_name", so they are no longer
  shown on the top-level API Reference page (the lists of classes/functions).
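
A minimal sketch of the conventions behind these bullets; the `foodb` package and the `FooDB` class are made-up placeholders for illustration, not modules touched by this commit:

```python
from typing import Any

from langchain.vectorstores.base import VectorStore


def _import_foodb() -> Any:
    """Import the optional client package.

    The leading underscore keeps this helper off the top-level API Reference.
    """
    try:
        import foodb  # hypothetical optional dependency
    except ImportError:
        # These guards previously raised ValueError; ImportError is the
        # accurate exception type for a missing optional package.
        raise ImportError(
            "Could not import foodb python package. "
            "Please install it with `pip install foodb`."
        )
    return foodb


class FooDB(VectorStore):
    """`FooDB` vector store."""  # consistent one-line summary style
```

Under this sketch, a missing optional dependency surfaces as an `ImportError` rather than a `ValueError`, and the private helper stays out of the generated API docs.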
This commit is contained in:
parent d0ff0db698
commit c19888c12c
@@ -78,60 +78,60 @@ __all__ = [
     "AlibabaCloudOpenSearchSettings",
     "AnalyticDB",
     "Annoy",
+    "Annoy",
+    "AtlasDB",
     "AtlasDB",
     "AwaDB",
     "AzureSearch",
     "Bagel",
     "Cassandra",
     "Chroma",
+    "Chroma",
+    "Clarifai",
     "Clickhouse",
     "ClickhouseSettings",
     "DashVector",
     "DeepLake",
+    "DeepLake",
     "Dingo",
     "DocArrayHnswSearch",
     "DocArrayInMemorySearch",
-    "ElasticVectorSearch",
     "ElasticKnnSearch",
+    "ElasticVectorSearch",
     "ElasticsearchStore",
     "Epsilla",
     "FAISS",
-    "PGEmbedding",
     "Hologres",
     "LanceDB",
-    "MatchingEngine",
     "Marqo",
+    "MatchingEngine",
     "Meilisearch",
     "Milvus",
-    "Zilliz",
-    "SingleStoreDB",
-    "Chroma",
-    "Clarifai",
-    "OpenSearchVectorSearch",
-    "AtlasDB",
-    "DeepLake",
-    "Annoy",
     "MongoDBAtlasVectorSearch",
     "MyScale",
     "MyScaleSettings",
     "OpenSearchVectorSearch",
+    "OpenSearchVectorSearch",
+    "PGEmbedding",
+    "PGVector",
     "Pinecone",
     "Qdrant",
     "Redis",
     "Rockset",
-    "ScaNN",
     "SKLearnVectorStore",
+    "ScaNN",
+    "SingleStoreDB",
     "SingleStoreDB",
     "StarRocks",
     "SupabaseVectorStore",
     "Tair",
     "Tigris",
     "Typesense",
+    "USearch",
     "Vectara",
     "VectorStore",
     "Weaviate",
     "ZepVectorStore",
     "Zilliz",
-    "PGVector",
-    "USearch",
+    "Zilliz",
 ]
@@ -9,6 +9,8 @@ from langchain.vectorstores.pgvector import BaseModel
 
 
 class CollectionStore(BaseModel):
+    """Collection store."""
+
     __tablename__ = "langchain_pg_collection"
 
     name = sqlalchemy.Column(sqlalchemy.String)

@@ -48,6 +50,8 @@ class CollectionStore(BaseModel):
 
 
 class EmbeddingStore(BaseModel):
+    """Embedding store."""
+
     __tablename__ = "langchain_pg_embedding"
 
     collection_id = sqlalchemy.Column(

@@ -12,7 +12,7 @@ logger = logging.getLogger()
 
 
 class AlibabaCloudOpenSearchSettings:
-    """Alibaba Cloud Opensearch Client Configuration.
+    """`Alibaba Cloud Opensearch` client configuration.
 
     Attribute:
         endpoint (str) : The endpoint of opensearch instance, You can find it

@@ -90,7 +90,7 @@ def create_metadata(fields: Dict[str, Any]) -> Dict[str, Any]:
 
 
 class AlibabaCloudOpenSearch(VectorStore):
-    """Alibaba Cloud OpenSearch Vector Store"""
+    """`Alibaba Cloud OpenSearch` vector store."""
 
     def __init__(
         self,

@@ -102,7 +102,7 @@ class AlibabaCloudOpenSearch(VectorStore):
             from alibabacloud_ha3engine import client, models
             from alibabacloud_tea_util import models as util_models
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import alibaba cloud opensearch python package. "
                 "Please install it with `pip install alibabacloud-ha3engine`."
             )
@@ -1,4 +1,3 @@
-"""VectorStore wrapper around a Postgres/PGVector database."""
 from __future__ import annotations
 
 import logging

@@ -25,7 +24,7 @@ Base = declarative_base()  # type: Any
 
 
 class AnalyticDB(VectorStore):
-    """VectorStore implementation using AnalyticDB.
+    """`AnalyticDB` (distributed PostgreSQL) vector store.
 
     AnalyticDB is a distributed full postgresql syntax cloud-native database.
     - `connection_string` is a postgres connection string.

@@ -1,4 +1,3 @@
-"""Wrapper around Annoy vector database."""
 from __future__ import annotations
 
 import os

@@ -26,7 +25,7 @@ def dependable_annoy_import() -> Any:
     try:
         import annoy
     except ImportError:
-        raise ValueError(
+        raise ImportError(
            "Could not import annoy python package. "
            "Please install it with `pip install --user annoy` "
        )

@@ -34,7 +33,7 @@ def dependable_annoy_import() -> Any:
 
 
 class Annoy(VectorStore):
-    """Wrapper around Annoy vector database.
+    """`Annoy` vector store.
 
     To use, you should have the ``annoy`` python package installed.
 

@@ -1,4 +1,3 @@
-"""Wrapper around Atlas by Nomic."""
 from __future__ import annotations
 
 import logging

@@ -15,7 +14,9 @@ logger = logging.getLogger(__name__)
 
 
 class AtlasDB(VectorStore):
-    """Wrapper around Atlas: Nomic's neural database and rhizomatic instrument.
+    """`Atlas` vector store.
+
+    Atlas is the `Nomic's` neural database and `rhizomatic` instrument.
 
     To use, you should have the ``nomic`` python package installed.
 

@@ -61,7 +62,7 @@ class AtlasDB(VectorStore):
             import nomic
             from nomic import AtlasProject
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import nomic python package. "
                 "Please install it with `pip install nomic`."
             )

@@ -1,4 +1,3 @@
-"""Wrapper around AwaDB for embedding vectors"""
 from __future__ import annotations
 
 import logging

@@ -20,7 +19,7 @@ DEFAULT_TOPN = 4
 
 
 class AwaDB(VectorStore):
-    """Interface implemented by AwaDB vector stores."""
+    """`AwaDB` vector store."""
 
     _DEFAULT_TABLE_NAME = "langchain_awadb"
 

@@ -50,7 +49,7 @@ class AwaDB(VectorStore):
         try:
             import awadb
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import awadb python package. "
                 "Please install it with `pip install awadb`."
             )
@@ -1,4 +1,3 @@
-"""Wrapper around Azure Cognitive Search."""
 from __future__ import annotations
 
 import base64

@@ -177,7 +176,7 @@ def _get_search_client(
 
 
 class AzureSearch(VectorStore):
-    """Azure Cognitive Search vector store."""
+    """`Azure Cognitive Search` vector store."""
 
     def __init__(
         self,

@@ -526,7 +525,7 @@ class AzureSearch(VectorStore):
 
 
 class AzureSearchVectorStoreRetriever(BaseRetriever):
-    """Retriever that uses Azure Search to find similar documents."""
+    """Retriever that uses `Azure Cognitive Search`."""
 
     vectorstore: AzureSearch
     """Azure Search instance used to find similar documents."""

@@ -1,4 +1,3 @@
-"""BagelDB integration"""
 from __future__ import annotations
 
 import uuid

@@ -43,7 +42,7 @@ def _results_to_docs_and_scores(results: Any) -> List[Tuple[Document, float]]:
 
 
 class Bagel(VectorStore):
-    """Wrapper around BagelDB.ai vector store.
+    """``BagelDB.ai`` vector store.
 
     To use, you should have the ``betabageldb`` python package installed.
 

@@ -70,7 +69,7 @@ class Bagel(VectorStore):
             import bagel
             import bagel.config
         except ImportError:
-            raise ValueError("Please install bagel `pip install betabageldb`.")
+            raise ImportError("Please install bagel `pip install betabageldb`.")
         if client is not None:
             self._client_settings = client_settings
             self._client = client
@@ -1,5 +1,3 @@
-"""Interface for vector stores."""
-
 from __future__ import annotations
 
 import asyncio

@@ -37,7 +35,7 @@ VST = TypeVar("VST", bound="VectorStore")
 
 
 class VectorStore(ABC):
-    """Interface for vector stores."""
+    """Interface for vector store."""
 
     @abstractmethod
     def add_texts(

@@ -520,7 +518,7 @@ class VectorStore(ABC):
 
 
 class VectorStoreRetriever(BaseRetriever):
-    """Retriever class for VectorStore."""
+    """Base Retriever class for VectorStore."""
 
     vectorstore: VectorStore
     """VectorStore to use for retrieval."""

@@ -1,4 +1,3 @@
-"""Wrapper around Cassandra vector-store capabilities, based on cassIO."""
 from __future__ import annotations
 
 import typing

@@ -19,8 +18,9 @@ CVST = TypeVar("CVST", bound="Cassandra")
 
 
 class Cassandra(VectorStore):
-    """Wrapper around Cassandra embeddings platform.
+    """`Cassandra` vector store.
 
+    It based on the Cassandra vector-store capabilities, based on cassIO.
     There is no notion of a default table name, since each embedding
     function implies its own vector dimension, which is part of the schema.
 
@@ -1,4 +1,3 @@
-"""Wrapper around ChromaDB embeddings platform."""
 from __future__ import annotations
 
 import logging

@@ -50,7 +49,7 @@ def _results_to_docs_and_scores(results: Any) -> List[Tuple[Document, float]]:
 
 
 class Chroma(VectorStore):
-    """Wrapper around ChromaDB embeddings platform.
+    """`ChromaDB` vector store.
 
     To use, you should have the ``chromadb`` python package installed.
 

@@ -76,12 +75,12 @@ class Chroma(VectorStore):
         client: Optional[chromadb.Client] = None,
         relevance_score_fn: Optional[Callable[[float], float]] = None,
     ) -> None:
-        """Initialize with Chroma client."""
+        """Initialize with a Chroma client."""
         try:
             import chromadb
             import chromadb.config
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import chromadb python package. "
                 "Please install it with `pip install chromadb`."
             )

@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
 
 
 class Clarifai(VectorStore):
-    """Wrapper around Clarifai AI platform's vector store.
+    """`Clarifai AI` vector store.
 
     To use, you should have the ``clarifai`` python package installed.
 

@@ -55,7 +55,7 @@ class Clarifai(VectorStore):
             from clarifai.auth.helper import DEFAULT_BASE, ClarifaiAuthHelper
             from clarifai.client import create_stub
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import clarifai python package. "
                 "Please install it with `pip install clarifai`."
             )

@@ -1,5 +1,3 @@
-"""Wrapper around open source ClickHouse VectorSearch capability."""
-
 from __future__ import annotations
 
 import json

@@ -33,7 +31,7 @@ def has_mul_sub_str(s: str, *args: Any) -> bool:
 
 
 class ClickhouseSettings(BaseSettings):
-    """ClickHouse Client Configuration
+    """`ClickHouse` client configuration.
 
     Attribute:
         clickhouse_host (str) : An URL to connect to MyScale backend.

@@ -101,7 +99,7 @@ class ClickhouseSettings(BaseSettings):
 
 
 class Clickhouse(VectorStore):
-    """Wrapper around ClickHouse vector database
+    """`ClickHouse VectorSearch` vector store.
 
     You need a `clickhouse-connect` python package, and a valid account
     to connect to ClickHouse.

@@ -130,7 +128,7 @@ class Clickhouse(VectorStore):
         try:
             from clickhouse_connect import get_client
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import clickhouse connect python package. "
                 "Please install it with `pip install clickhouse-connect`."
             )
@@ -1,4 +1,3 @@
-"""Wrapper around DashVector vector database."""
 from __future__ import annotations
 
 import logging

@@ -23,7 +22,7 @@ logger = logging.getLogger(__name__)
 
 
 class DashVector(VectorStore):
-    """Wrapper around DashVector vector database.
+    """`DashVector` vector store.
 
     To use, you should have the ``dashvector`` python package installed.
 

@@ -1,4 +1,3 @@
-"""Wrapper around Activeloop Deep Lake."""
 from __future__ import annotations
 
 import logging

@@ -24,9 +23,9 @@ logger = logging.getLogger(__name__)
 
 
 class DeepLake(VectorStore):
-    """Wrapper around Deep Lake, a data lake for deep learning applications.
+    """`Activeloop Deep Lake` vector store.
 
-    We integrated deeplake's similarity search and filtering for fast prototyping,
+    We integrated deeplake's similarity search and filtering for fast prototyping.
     Now, it supports Tensor Query Language (TQL) for production use cases
     over billion rows.
 

@@ -126,7 +125,7 @@ class DeepLake(VectorStore):
         self.verbose = verbose
 
         if _DEEPLAKE_INSTALLED is False:
-            raise ValueError(
+            raise ImportError(
                 "Could not import deeplake python package. "
                 "Please install it with `pip install deeplake[enterprise]`."
             )

@@ -135,7 +134,7 @@ class DeepLake(VectorStore):
             kwargs.get("runtime") == {"tensor_db": True}
             and version_compare(deeplake.__version__, "3.6.7") == -1
         ):
-            raise ValueError(
+            raise ImportError(
                 "To use tensor_db option you need to update deeplake to `3.6.7`. "
                 f"Currently installed deeplake version is {deeplake.__version__}. "
             )

@@ -1,4 +1,3 @@
-"""Wrapper around the Dingo vector database."""
 from __future__ import annotations
 
 import logging

@@ -16,7 +15,7 @@ logger = logging.getLogger(__name__)
 
 
 class Dingo(VectorStore):
-    """Wrapper around Dingo vector database.
+    """`Dingo` vector store.
 
     To use, you should have the ``dingodb`` python package installed.
 

@@ -33,6 +33,8 @@ def _check_docarray_import() -> None:
 
 
 class DocArrayIndex(VectorStore, ABC):
+    """Base class for `DocArray` based vector stores."""
+
     def __init__(
         self,
         doc_index: "BaseDocIndex",

@@ -67,7 +69,7 @@ class DocArrayIndex(VectorStore, ABC):
         metadatas: Optional[List[dict]] = None,
         **kwargs: Any,
     ) -> List[str]:
-        """Run more texts through the embeddings and add to the vectorstore.
+        """Embed texts and add to the vector store.
 
         Args:
             texts: Iterable of strings to add to the vectorstore.
@@ -1,4 +1,3 @@
-"""Wrapper around Hnswlib store."""
 from __future__ import annotations
 
 from typing import Any, List, Literal, Optional

@@ -11,7 +10,7 @@ from langchain.vectorstores.docarray.base import (
 
 
 class DocArrayHnswSearch(DocArrayIndex):
-    """Wrapper around HnswLib storage.
+    """`HnswLib` storage using `DocArray` package.
 
     To use it, you should have the ``docarray`` package with version >=0.32.0 installed.
     You can install it with `pip install "langchain[docarray]"`.

@@ -11,7 +11,7 @@ from langchain.vectorstores.docarray.base import (
 
 
 class DocArrayInMemorySearch(DocArrayIndex):
-    """Wrapper around in-memory storage for exact search.
+    """In-memory `DocArray` storage for exact search.
 
     To use it, you should have the ``docarray`` package with version >=0.32.0 installed.
     You can install it with `pip install "langchain[docarray]"`.

@@ -53,9 +53,9 @@ def _default_script_query(query_vector: List[float], filter: Optional[dict]) ->
 
 @deprecated("0.0.265", alternative="ElasticsearchStore class.", pending=True)
 class ElasticVectorSearch(VectorStore):
-    """Wrapper around Elasticsearch as a vector database.
+    """[DEPRECATED] `Elasticsearch` vector store.
 
-    To connect to an Elasticsearch instance that does not require
+    To connect to an `Elasticsearch` instance that does not require
     login credentials, pass the Elasticsearch URL and index name along with the
     embedding object to the constructor.
 

@@ -340,11 +340,10 @@ class ElasticVectorSearch(VectorStore):
 
 
 class ElasticKnnSearch(VectorStore):
-    """
-    ElasticKnnSearch is a class for performing k-nearest neighbor
-    (k-NN) searches on text data using Elasticsearch.
+    """[DEPRECATED] `Elasticsearch` with k-nearest neighbor search
+    (`k-NN`) vector store.
 
-    This class is used to create an Elasticsearch index of text data that
+    It creates an Elasticsearch index of text data that
     can be searched using k-NN search. The text data is transformed into
     vector embeddings using a provided embedding model, and these embeddings
     are stored in the Elasticsearch index.

@@ -1,5 +1,3 @@
-"""Wrapper around Elasticsearch vector database."""
-
 import logging
 import uuid
 from abc import ABC, abstractmethod

@@ -28,6 +26,8 @@ logger = logging.getLogger(__name__)
 
 
 class BaseRetrievalStrategy(ABC):
+    """Base class for `Elasticsearch` retrieval strategies."""
+
     @abstractmethod
     def query(
         self,

@@ -109,6 +109,8 @@ class BaseRetrievalStrategy(ABC):
 
 
 class ApproxRetrievalStrategy(BaseRetrievalStrategy):
+    """Approximate retrieval strategy using the `HNSW` algorithm."""
+
     def __init__(
         self,
         query_model_id: Optional[str] = None,

@@ -211,6 +213,8 @@ class ApproxRetrievalStrategy(BaseRetrievalStrategy):
 
 
 class ExactRetrievalStrategy(BaseRetrievalStrategy):
+    """Exact retrieval strategy using the `script_score` query."""
+
     def query(
         self,
         query_vector: Union[List[float], None],

@@ -276,6 +280,8 @@ class ExactRetrievalStrategy(BaseRetrievalStrategy):
 
 
 class SparseRetrievalStrategy(BaseRetrievalStrategy):
+    """Sparse retrieval strategy using the `text_expansion` processor."""
+
     def __init__(self, model_id: Optional[str] = None):
         self.model_id = model_id or ".elser_model_1"
 

@@ -355,8 +361,7 @@ class SparseRetrievalStrategy(BaseRetrievalStrategy):
 
 
 class ElasticsearchStore(VectorStore):
-    """Wrapper around Elasticsearch search database.
-
+    """`Elasticsearch` vector store.
 
     Example:
         .. code-block:: python
@@ -1,4 +1,3 @@
-"""Wrapper around FAISS vector database."""
 from __future__ import annotations
 
 import operator

@@ -65,7 +64,7 @@ def _len_check_if_sized(x: Any, y: Any, x_name: str, y_name: str) -> None:
 
 
 class FAISS(VectorStore):
-    """Wrapper around FAISS vector database.
+    """`Meta Faiss` vector store.
 
     To use, you must have the ``faiss`` python package installed.
 

@@ -1,4 +1,3 @@
-"""VectorStore wrapper around a Hologres database."""
 from __future__ import annotations
 
 import json

@@ -16,7 +15,7 @@ _LANGCHAIN_DEFAULT_TABLE_NAME = "langchain_pg_embedding"
 
 
 class HologresWrapper:
-    """Wrapper around Hologres service."""
+    """`Hologres API` wrapper."""
 
     def __init__(self, connection_string: str, ndims: int, table_name: str) -> None:
         """Initialize the wrapper.

@@ -114,7 +113,7 @@ document text);"""
 
 
 class Hologres(VectorStore):
-    """VectorStore implementation using Hologres.
+    """`Hologres API` vector store.
 
     - `connection_string` is a hologres connection string.
     - `embedding_function` any embedding function implementing

@@ -1,4 +1,3 @@
-"""Wrapper around LanceDB vector database"""
 from __future__ import annotations
 
 import uuid

@@ -10,7 +9,7 @@ from langchain.vectorstores.base import VectorStore
 
 
 class LanceDB(VectorStore):
-    """Wrapper around LanceDB vector database.
+    """`LanceDB` vector store.
 
     To use, you should have ``lancedb`` python package installed.
 

@@ -36,7 +35,7 @@ class LanceDB(VectorStore):
         try:
             import lancedb
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import lancedb python package. "
                 "Please install it with `pip install lancedb`."
             )

@@ -1,4 +1,3 @@
-"""Wrapper around weaviate vector database."""
 from __future__ import annotations
 
 import json

@@ -25,7 +24,7 @@ if TYPE_CHECKING:
 
 
 class Marqo(VectorStore):
-    """Wrapper around Marqo database.
+    """`Marqo` vector store.
 
     Marqo indexes have their own models associated with them to generate your
     embeddings. This means that you can selected from a range of different models

@@ -1,4 +1,3 @@
-"""Vertex Matching Engine implementation of the vector store."""
 from __future__ import annotations
 
 import json

@@ -21,7 +20,7 @@ logger = logging.getLogger()
 
 
 class MatchingEngine(VectorStore):
-    """Vertex Matching Engine implementation of the vector store.
+    """`Google Vertex AI Matching Engine` vector store.
 
     While the embeddings are stored in the Matching Engine, the embedded
     documents will be stored in GCS.

@@ -1,4 +1,3 @@
-"""Wrapper around Meilisearch vector database."""
 from __future__ import annotations
 
 import uuid

@@ -45,7 +44,7 @@ def _create_client(
 
 
 class Meilisearch(VectorStore):
-    """Initialize wrapper around Meilisearch vector database.
+    """`Meilisearch` vector store.
 
     To use this, you need to have `meilisearch` python package installed,
     and a running Meilisearch instance.
@@ -1,4 +1,3 @@
-"""Wrapper around the Milvus vector database."""
 from __future__ import annotations
 
 import logging

@@ -24,19 +23,18 @@ DEFAULT_MILVUS_CONNECTION = {
 
 
 class Milvus(VectorStore):
-    """Initialize wrapper around the milvus vector database.
+    """`Milvus` vector store.
 
-    In order to use this you need to have `pymilvus` installed and a
-    running Milvus
+    You need to install `pymilvus` and run Milvus.
 
     See the following documentation for how to run a Milvus instance:
     https://milvus.io/docs/install_standalone-docker.md
 
     If looking for a hosted Milvus, take a look at this documentation:
     https://zilliz.com/cloud and make use of the Zilliz vectorstore found in
-    this project,
+    this project.
 
-    IF USING L2/IP metric IT IS HIGHLY SUGGESTED TO NORMALIZE YOUR DATA.
+    IF USING L2/IP metric, IT IS HIGHLY SUGGESTED TO NORMALIZE YOUR DATA.
 
     Args:
         embedding_function (Embeddings): Function used to embed the text.

@@ -32,7 +32,7 @@ DEFAULT_INSERT_BATCH_SIZE = 100
 
 
 class MongoDBAtlasVectorSearch(VectorStore):
-    """Wrapper around MongoDB Atlas Vector Search.
+    """`MongoDB Atlas Vector Search` vector store.
 
     To use, you should have both:
     - the ``pymongo`` python package installed

@@ -1,4 +1,3 @@
-"""Wrapper around MyScale vector database."""
 from __future__ import annotations
 
 import json

@@ -32,7 +31,7 @@ def has_mul_sub_str(s: str, *args: Any) -> bool:
 
 
 class MyScaleSettings(BaseSettings):
-    """MyScale Client Configuration
+    """MyScale client configuration.
 
     Attribute:
         myscale_host (str) : An URL to connect to MyScale backend.

@@ -93,13 +92,13 @@ class MyScaleSettings(BaseSettings):
 
 
 class MyScale(VectorStore):
-    """Wrapper around MyScale vector database
+    """`MyScale` vector store.
 
     You need a `clickhouse-connect` python package, and a valid account
     to connect to MyScale.
 
-    MyScale can not only search with simple vector indexes,
-    it also supports complex query with multiple conditions,
+    MyScale can not only search with simple vector indexes.
+    It also supports a complex query with multiple conditions,
     constraints and even sub-queries.
 
     For more information, please visit

@@ -122,7 +121,7 @@ class MyScale(VectorStore):
         try:
             from clickhouse_connect import get_client
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import clickhouse connect python package. "
                 "Please install it with `pip install clickhouse-connect`."
             )
@@ -1,4 +1,3 @@
-"""Wrapper around OpenSearch vector database."""
 from __future__ import annotations
 
 import uuid

@@ -26,7 +25,7 @@ def _import_opensearch() -> Any:
     try:
         from opensearchpy import OpenSearch
     except ImportError:
-        raise ValueError(IMPORT_OPENSEARCH_PY_ERROR)
+        raise ImportError(IMPORT_OPENSEARCH_PY_ERROR)
     return OpenSearch
 
 

@@ -35,7 +34,7 @@ def _import_bulk() -> Any:
     try:
         from opensearchpy.helpers import bulk
     except ImportError:
-        raise ValueError(IMPORT_OPENSEARCH_PY_ERROR)
+        raise ImportError(IMPORT_OPENSEARCH_PY_ERROR)
     return bulk
 
 

@@ -44,7 +43,7 @@ def _import_not_found_error() -> Any:
     try:
         from opensearchpy.exceptions import NotFoundError
     except ImportError:
-        raise ValueError(IMPORT_OPENSEARCH_PY_ERROR)
+        raise ImportError(IMPORT_OPENSEARCH_PY_ERROR)
     return NotFoundError
 
 

@@ -54,7 +53,7 @@ def _get_opensearch_client(opensearch_url: str, **kwargs: Any) -> Any:
         opensearch = _import_opensearch()
         client = opensearch(opensearch_url, **kwargs)
     except ValueError as e:
-        raise ValueError(
+        raise ImportError(
             f"OpenSearch client string provided is not in proper format. "
             f"Got error: {e} "
         )

@@ -315,7 +314,7 @@ def _get_kwargs_value(kwargs: Any, key: str, default_value: Any) -> Any:
 
 
 class OpenSearchVectorSearch(VectorStore):
-    """Wrapper around OpenSearch as a vector database.
+    """`Amazon OpenSearch Vector Engine` vector store.
 
     Example:
         .. code-block:: python

@@ -1,4 +1,3 @@
-"""VectorStore wrapper around a Postgres database."""
 from __future__ import annotations
 
 import logging

@@ -23,11 +22,15 @@ _LANGCHAIN_DEFAULT_COLLECTION_NAME = "langchain"
 
 
 class BaseModel(Base):
+    """Base model for all SQL stores."""
+
     __abstract__ = True
     uuid = sqlalchemy.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
 
 
 class CollectionStore(BaseModel):
+    """Collection store."""
+
     __tablename__ = "langchain_pg_collection"
 
     name = sqlalchemy.Column(sqlalchemy.String)

@@ -67,6 +70,8 @@ class CollectionStore(BaseModel):
 
 
 class EmbeddingStore(BaseModel):
+    """Embedding store."""
+
     __tablename__ = "langchain_pg_embedding"
 
     collection_id = sqlalchemy.Column(

@@ -87,15 +92,14 @@ class EmbeddingStore(BaseModel):
 
 
 class QueryResult:
-    """QueryResult is a result from a query."""
+    """Result from a query."""
 
     EmbeddingStore: EmbeddingStore
     distance: float
 
 
 class PGEmbedding(VectorStore):
-    """
-    VectorStore implementation using Postgres and the pg_embedding extension.
+    """`Postgres` with the `pg_embedding` extension as a vector store.
 
     pg_embedding uses sequential scan by default. but you can create a HNSW index
     using the create_hnsw_index method.
@@ -1,4 +1,3 @@
-"""VectorStore wrapper around a Postgres/PGVector database."""
 from __future__ import annotations
 
 import enum

@@ -46,12 +45,14 @@ _LANGCHAIN_DEFAULT_COLLECTION_NAME = "langchain"
 
 
 class BaseModel(Base):
+    """Base model for the SQL stores."""
+
     __abstract__ = True
     uuid = sqlalchemy.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
 
 
 class PGVector(VectorStore):
-    """VectorStore implementation using Postgres and pgvector.
+    """`Postgres`/`PGVector` vector store.
 
     To use, you should have the ``pgvector`` python package installed.
 

@@ -1,4 +1,3 @@
-"""Wrapper around Pinecone vector database."""
 from __future__ import annotations
 
 import logging

@@ -17,7 +16,7 @@ logger = logging.getLogger(__name__)
 
 
 class Pinecone(VectorStore):
-    """Wrapper around Pinecone vector database.
+    """`Pinecone` vector store.
 
     To use, you should have the ``pinecone-client`` python package installed.
 

@@ -1,4 +1,3 @@
-"""Wrapper around Qdrant vector database."""
 from __future__ import annotations
 
 import asyncio

@@ -40,7 +39,7 @@ if TYPE_CHECKING:
 
 
 class QdrantException(Exception):
-    """Base class for all the Qdrant related exceptions"""
+    """`Qdrant` related exceptions."""
 
 
 def sync_call_fallback(method: Callable) -> Callable:

@@ -68,7 +67,7 @@ def sync_call_fallback(method: Callable) -> Callable:
 
 
 class Qdrant(VectorStore):
-    """Wrapper around Qdrant vector database.
+    """`Qdrant` vector store.
 
     To use you should have the ``qdrant-client`` package installed.
 

@@ -102,7 +101,7 @@ class Qdrant(VectorStore):
         try:
             import qdrant_client
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import qdrant-client python package. "
                 "Please install it with `pip install qdrant-client`."
             )

@@ -1,5 +1,3 @@
-"""Wrapper around Redis vector database."""
-
 from __future__ import annotations
 
 import json

@@ -96,7 +94,7 @@ def _default_relevance_score(val: float) -> float:
 
 
 class Redis(VectorStore):
-    """Wrapper around Redis vector database.
+    """`Redis` vector store.
 
     To use, you should have the ``redis`` python package installed.
 

@@ -184,7 +182,7 @@ class Redis(VectorStore):
             from redis.commands.search.field import TextField, VectorField
             from redis.commands.search.indexDefinition import IndexDefinition, IndexType
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import redis python package. "
                 "Please install it with `pip install redis`."
             )

@@ -612,7 +610,7 @@ class Redis(VectorStore):
 
 
 class RedisVectorStoreRetriever(VectorStoreRetriever):
-    """Retriever for Redis VectorStore."""
+    """Retriever for `Redis` vector store."""
 
     vectorstore: Redis
     """Redis VectorStore."""
@@ -1,4 +1,3 @@
-"""Wrapper around Rockset vector database."""
 from __future__ import annotations
 
 import logging

@@ -13,7 +12,7 @@ logger = logging.getLogger(__name__)
 
 
 class Rockset(VectorStore):
-    """Wrapper arpund Rockset vector database.
+    """`Rockset` vector store.
 
     To use, you should have the `rockset` python package installed. Note that to use
     this, the collection being used must already exist in your Rockset instance.

@@ -1,4 +1,3 @@
-"""Wrapper around ScaNN vector database."""
 from __future__ import annotations
 
 import operator

@@ -25,7 +24,7 @@ def normalize(x: np.ndarray) -> np.ndarray:
 
 def dependable_scann_import() -> Any:
     """
-    Import scann if available, otherwise raise error.
+    Import `scann` if available, otherwise raise error.
     """
     try:
         import scann

@@ -38,7 +37,7 @@ def dependable_scann_import() -> Any:
 
 
 class ScaNN(VectorStore):
-    """Wrapper around ScaNN vector database.
+    """`ScaNN` vector store.
 
     To use, you should have the ``scann`` python package installed.
 

@@ -1,5 +1,3 @@
-"""Wrapper around SingleStore DB."""
-
 from __future__ import annotations
 
 import json

@@ -35,8 +33,7 @@ ORDERING_DIRECTIVE: dict = {
 
 
 class SingleStoreDB(VectorStore):
-    """
-    This class serves as a Pythonic interface to the SingleStore DB database.
+    """`SingleStore DB` vector store.
 
     The prerequisite for using this class is the installation of the ``singlestoredb``
     Python package.

@@ -21,7 +21,7 @@ DEFAULT_FETCH_K = 20  # Number of Documents to initially fetch during MMR search
 
 
 class BaseSerializer(ABC):
-    """Abstract base class for saving and loading data."""
+    """Base class for serializing data."""
 
     def __init__(self, persist_path: str) -> None:
         self.persist_path = persist_path

@@ -57,7 +57,7 @@ class JsonSerializer(BaseSerializer):
 
 
 class BsonSerializer(BaseSerializer):
-    """Serializes data in binary json using the bson python package."""
+    """Serializes data in binary json using the `bson` python package."""
 
     def __init__(self, persist_path: str) -> None:
         super().__init__(persist_path)

@@ -77,7 +77,7 @@ class BsonSerializer(BaseSerializer):
 
 
 class ParquetSerializer(BaseSerializer):
-    """Serializes data in Apache Parquet format using the pyarrow package."""
+    """Serializes data in `Apache Parquet` format using the `pyarrow` package."""
 
     def __init__(self, persist_path: str) -> None:
         super().__init__(persist_path)

@@ -125,8 +125,8 @@ class SKLearnVectorStoreException(RuntimeError):
 
 
 class SKLearnVectorStore(VectorStore):
-    """A simple in-memory vector store based on the scikit-learn library
-    NearestNeighbors implementation."""
+    """Simple in-memory vector store based on the `scikit-learn` library
+    `NearestNeighbors` implementation."""
 
     def __init__(
         self,
@@ -1,5 +1,3 @@
-"""Wrapper around open source StarRocks VectorSearch capability."""
-
 from __future__ import annotations
 
 import json

@@ -69,7 +67,7 @@ def get_named_result(connection: Any, query: str) -> List[dict[str, Any]]:
 
 
 class StarRocksSettings(BaseSettings):
-    """StarRocks Client Configuration
+    """StarRocks client configuration.
 
     Attribute:
         StarRocks_host (str) : An URL to connect to MyScale backend.

@@ -121,7 +119,7 @@ class StarRocksSettings(BaseSettings):
 
 
 class StarRocks(VectorStore):
-    """Wrapper around StarRocks vector database
+    """`StarRocks` vector store.
 
     You need a `pymysql` python package, and a valid account
     to connect to StarRocks.

@@ -26,7 +26,9 @@ if TYPE_CHECKING:
 
 
 class SupabaseVectorStore(VectorStore):
-    """VectorStore for a Supabase postgres database. Assumes you have the `pgvector`
+    """`Supabase Postgres` vector store.
+
+    It assumes you have the `pgvector`
     extension installed and a `match_documents` (or similar) function. For more details:
     https://integrations.langchain.com/vectorstores?integration_name=SupabaseVectorStore
 

@@ -92,7 +94,7 @@ class SupabaseVectorStore(VectorStore):
         try:
             import supabase  # noqa: F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import supabase python package. "
                 "Please install it with `pip install supabase`."
             )

@@ -1,4 +1,3 @@
-"""Wrapper around Tair Vector."""
 from __future__ import annotations
 
 import json

@@ -19,7 +18,7 @@ def _uuid_key() -> str:
 
 
 class Tair(VectorStore):
-    """Wrapper around Tair Vector store."""
+    """`Tair` vector store."""
 
     def __init__(
         self,

@@ -15,12 +15,14 @@ if TYPE_CHECKING:
 
 
 class Tigris(VectorStore):
+    """`Tigris` vector store."""
+
     def __init__(self, client: TigrisClient, embeddings: Embeddings, index_name: str):
-        """Initialize Tigris vector store"""
+        """Initialize Tigris vector store."""
         try:
             import tigrisdb  # noqa: F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import tigrisdb python package. "
                 "Please install it with `pip install tigrisdb`"
             )
@@ -1,4 +1,3 @@
-"""Wrapper around Typesense vector search"""
 from __future__ import annotations
 
 import uuid

@@ -15,7 +14,7 @@ if TYPE_CHECKING:
 
 
 class Typesense(VectorStore):
-    """Wrapper around Typesense vector search.
+    """`Typesense` vector store.
 
     To use, you should have the ``typesense`` python package installed.
 

@@ -61,7 +60,7 @@ class Typesense(VectorStore):
         try:
             from typesense import Client
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import typesense python package. "
                 "Please install it with `pip install typesense`."
             )

@@ -1,4 +1,3 @@
-"""Wrapper around USearch vector database."""
 from __future__ import annotations
 
 from typing import Any, Dict, Iterable, List, Optional, Tuple

@@ -27,7 +26,8 @@ def dependable_usearch_import() -> Any:
 
 
 class USearch(VectorStore):
-    """Wrapper around USearch vector database.
+    """`USearch` vector store.
+
     To use, you should have the ``usearch`` python package installed.
     """
 

@@ -1,4 +1,3 @@
-"""Wrapper around Vectara vector database."""
 from __future__ import annotations
 
 import json

@@ -18,7 +17,7 @@ logger = logging.getLogger(__name__)
 
 
 class Vectara(VectorStore):
-    """Implementation of Vector Store using Vectara.
+    """`Vectara API` vector store.
 
     See (https://vectara.com).
 

@@ -426,7 +425,7 @@ class Vectara(VectorStore):
 
 
 class VectaraRetriever(VectorStoreRetriever):
-    """Retriever class for Vectara."""
+    """Retriever class for `Vectara`."""
 
     vectorstore: Vectara
     """Vectara vectorstore."""

@@ -1,4 +1,3 @@
-"""Wrapper around weaviate vector database."""
 from __future__ import annotations
 
 import datetime

@@ -44,7 +43,7 @@ def _create_weaviate_client(**kwargs: Any) -> Any:
     try:
         import weaviate
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "Could not import weaviate python package. "
             "Please install it with `pip install weaviate-client`"
         )

@@ -70,7 +69,7 @@ def _json_serializable(value: Any) -> Any:
 
 
 class Weaviate(VectorStore):
-    """Wrapper around Weaviate vector database.
+    """`Weaviate` vector store.
 
     To use, you should have the ``weaviate-client`` python package installed.
 
@@ -1,5 +1,3 @@
-"""Wrapper around Xata as a vector database."""
-
 from __future__ import annotations
 
 import time

@@ -12,7 +10,9 @@ from langchain.vectorstores.base import VectorStore
 
 
 class XataVectorStore(VectorStore):
-    """VectorStore for a Xata database. Assumes you have a Xata database
+    """`Xata` vector store.
+
+    It assumes you have a Xata database
     created with the right schema. See the guide at:
     https://integrations.langchain.com/vectorstores?integration_name=XataVectorStore
 

@@ -29,7 +29,7 @@ class XataVectorStore(VectorStore):
         try:
             from xata.client import XataClient  # noqa: F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import xata python package. "
                 "Please install it with `pip install xata`."
             )

@@ -22,8 +22,7 @@ logger = logging.getLogger()
 
 @dataclass
 class CollectionConfig:
-    """
-    A configuration class for a Zep Collection.
+    """Configuration for a `Zep Collection`.
 
     If the collection does not exist, it will be created.
 

@@ -46,9 +45,9 @@ class CollectionConfig:
 
 
 class ZepVectorStore(VectorStore):
-    """
-    ZepVectorStore is a VectorStore implementation that uses the Zep long-term memory
-    store as a backend. It provides methods for adding texts or documents to the store,
+    """`Zep` vector store.
+
+    It provides methods for adding texts or documents to the store,
     searching for similar documents, and deleting documents.
 
     Search scores are calculated using cosine similarity normalized to [0, 1].

@@ -10,9 +10,9 @@ logger = logging.getLogger(__name__)
 
 
 class Zilliz(Milvus):
-    """Initialize wrapper around the Zilliz vector database.
+    """`Zilliz` vector store.
 
-    In order to use this you need to have `pymilvus` installed and a
+    You need to have `pymilvus` installed and a
     running Zilliz database.
 
     See the following documentation for how to run a Zilliz instance: