mirror of
https://github.com/hwchase17/langchain.git
synced 2025-07-04 04:07:54 +00:00
Harrison/quadrant (#665)
Co-authored-by: Kacper Łukawski <kacperlukawski@users.noreply.github.com>
This commit is contained in:
parent
983b73f47c
commit
0b204d8c21
@ -16,7 +16,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 1,
|
||||
"id": "965eecee",
|
||||
"metadata": {
|
||||
"pycharm": {
|
||||
@ -27,12 +27,12 @@
|
||||
"source": [
|
||||
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
|
||||
"from langchain.text_splitter import CharacterTextSplitter\n",
|
||||
"from langchain.vectorstores import ElasticVectorSearch, Pinecone, Weaviate, FAISS"
|
||||
"from langchain.vectorstores import ElasticVectorSearch, Pinecone, Weaviate, FAISS, Qdrant"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"execution_count": 2,
|
||||
"id": "68481687",
|
||||
"metadata": {
|
||||
"pycharm": {
|
||||
@ -514,10 +514,62 @@
|
||||
"docs[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "9b852079",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Qdrant"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "e7d74bd2",
|
||||
"id": "e5ec70ce",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"host = \"<---host name here --->\"\n",
|
||||
"api_key = \"<---api key here--->\"\n",
|
||||
"qdrant = Qdrant.from_texts(texts, embeddings, host=host, prefer_grpc=True, api_key=api_key)\n",
|
||||
"query = \"What did the president say about Ketanji Brown Jackson\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 21,
|
||||
"id": "9805ad1f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"docs = qdrant.similarity_search(query)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 22,
|
||||
"id": "bd097a0e",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"Document(page_content='In state after state, new laws have been passed, not only to suppress the vote, but to subvert entire elections. \\n\\nWe cannot let this happen. \\n\\nTonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \\n\\nTonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \\n\\nOne of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \\n\\nAnd I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence.', lookup_str='', metadata={}, lookup_index=0)"
|
||||
]
|
||||
},
|
||||
"execution_count": 22,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"docs[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "8ffd66e2",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
|
@ -3,6 +3,14 @@ from langchain.vectorstores.base import VectorStore
|
||||
from langchain.vectorstores.elastic_vector_search import ElasticVectorSearch
|
||||
from langchain.vectorstores.faiss import FAISS
|
||||
from langchain.vectorstores.pinecone import Pinecone
|
||||
from langchain.vectorstores.qdrant import Qdrant
|
||||
from langchain.vectorstores.weaviate import Weaviate
|
||||
|
||||
__all__ = ["ElasticVectorSearch", "FAISS", "VectorStore", "Pinecone", "Weaviate"]
|
||||
__all__ = [
|
||||
"ElasticVectorSearch",
|
||||
"FAISS",
|
||||
"VectorStore",
|
||||
"Pinecone",
|
||||
"Weaviate",
|
||||
"Qdrant",
|
||||
]
|
||||
|
225
langchain/vectorstores/qdrant.py
Normal file
225
langchain/vectorstores/qdrant.py
Normal file
@ -0,0 +1,225 @@
|
||||
"""Wrapper around Qdrant vector database."""
|
||||
import uuid
|
||||
from operator import itemgetter
|
||||
from typing import Any, Callable, Iterable, List, Optional, Tuple
|
||||
|
||||
from langchain.docstore.document import Document
|
||||
from langchain.embeddings.base import Embeddings
|
||||
from langchain.utils import get_from_dict_or_env
|
||||
from langchain.vectorstores import VectorStore
|
||||
from langchain.vectorstores.utils import maximal_marginal_relevance
|
||||
|
||||
|
||||
class Qdrant(VectorStore):
    """Wrapper around Qdrant vector database.

    To use you should have the ``qdrant-client`` package installed.

    Example:
        .. code-block:: python

            from langchain import Qdrant

            client = QdrantClient()
            collection_name = "MyCollection"
            qdrant = Qdrant(client, collection_name, embedding_function)
    """

    def __init__(self, client: Any, collection_name: str, embedding_function: Callable):
        """Initialize with necessary components.

        Args:
            client: An already-constructed ``qdrant_client.QdrantClient``.
            collection_name: Name of the Qdrant collection to operate on.
            embedding_function: Callable mapping one text to its embedding vector.
        """
        try:
            import qdrant_client
        except ImportError:
            raise ValueError(
                "Could not import qdrant-client python package. "
                "Please install it with `pip install qdrant-client`."
            )

        if not isinstance(client, qdrant_client.QdrantClient):
            raise ValueError(
                f"client should be an instance of qdrant_client.QdrantClient, "
                f"got {type(client)}"
            )

        self.client: qdrant_client.QdrantClient = client
        self.collection_name = collection_name
        self.embedding_function = embedding_function

    def add_texts(
        self, texts: Iterable[str], metadatas: Optional[List[dict]] = None
    ) -> List[str]:
        """Run more texts through the embeddings and add to the vectorstore.

        Args:
            texts: Iterable of strings to add to the vectorstore.
            metadatas: Optional list of metadatas associated with the texts.

        Returns:
            List of ids from adding the texts into the vectorstore.
        """
        from qdrant_client.http import models as rest

        # Materialize the iterable once: it is consumed three times below
        # (ids, vectors, payloads); a generator input would otherwise be
        # exhausted after the first pass and silently upload nothing.
        texts = list(texts)
        ids = [uuid.uuid4().hex for _ in texts]
        self.client.upsert(
            collection_name=self.collection_name,
            points=rest.Batch(
                ids=ids,
                vectors=[self.embedding_function(text) for text in texts],
                payloads=self._build_payloads(texts, metadatas),
            ),
        )

        return ids

    def similarity_search(
        self, query: str, k: int = 4, **kwargs: Any
    ) -> List[Document]:
        """Return docs most similar to query.

        Args:
            query: Text to look up documents similar to.
            k: Number of Documents to return. Defaults to 4.

        Returns:
            List of Documents most similar to the query.
        """
        results = self.similarity_search_with_score(query, k)
        return list(map(itemgetter(0), results))

    def similarity_search_with_score(
        self, query: str, k: int = 4
    ) -> List[Tuple[Document, float]]:
        """Return docs most similar to query.

        Args:
            query: Text to look up documents similar to.
            k: Number of Documents to return. Defaults to 4.

        Returns:
            List of Documents most similar to the query and score for each
        """
        embedding = self.embedding_function(query)
        results = self.client.search(
            collection_name=self.collection_name,
            query_vector=embedding,
            with_payload=True,
            limit=k,
        )
        return [
            (
                self._document_from_scored_point(result),
                result.score,
            )
            for result in results
        ]

    def max_marginal_relevance_search(
        self, query: str, k: int = 4, fetch_k: int = 20
    ) -> List[Document]:
        """Return docs selected using the maximal marginal relevance.

        Maximal marginal relevance optimizes for similarity to query AND diversity
        among selected documents.

        Args:
            query: Text to look up documents similar to.
            k: Number of Documents to return. Defaults to 4.
            fetch_k: Number of Documents to fetch to pass to MMR algorithm.

        Returns:
            List of Documents selected by maximal marginal relevance.
        """
        embedding = self.embedding_function(query)
        results = self.client.search(
            collection_name=self.collection_name,
            query_vector=embedding,
            with_payload=True,
            with_vectors=True,
            # Fetch the larger candidate pool; MMR then selects k of them.
            # (Fetching only k here would make fetch_k a no-op and defeat
            # the diversity re-ranking.)
            limit=fetch_k,
        )
        embeddings = [result.vector for result in results]
        mmr_selected = maximal_marginal_relevance(embedding, embeddings, k=k)
        return [self._document_from_scored_point(results[i]) for i in mmr_selected]

    @classmethod
    def from_texts(
        cls,
        texts: List[str],
        embedding: Embeddings,
        metadatas: Optional[List[dict]] = None,
        **kwargs: Any,
    ) -> "Qdrant":
        """Construct Qdrant wrapper from raw documents.

        This is a user friendly interface that:
            1. Embeds documents.
            2. Creates an in memory docstore
            3. Initializes the Qdrant database

        This is intended to be a quick way to get started.

        Example:
            .. code-block:: python

                from langchain import Qdrant
                from langchain.embeddings import OpenAIEmbeddings
                embeddings = OpenAIEmbeddings()
                qdrant = Qdrant.from_texts(texts, embeddings)
        """
        try:
            import qdrant_client
        except ImportError:
            raise ValueError(
                "Could not import qdrant-client python package. "
                "Please install it with `pip install qdrant-client`."
            )

        from qdrant_client.http import models as rest

        # Embed every document up front; the first vector also gives the
        # dimensionality needed to configure the collection. (Embedding only
        # texts[:1] would upsert a single vector against len(texts) ids,
        # silently dropping every document after the first.)
        embeddings = embedding.embed_documents(texts)
        vector_size = len(embeddings[0])

        qdrant_host = get_from_dict_or_env(kwargs, "host", "QDRANT_HOST")
        # Strip wrapper-only options BEFORE forwarding the remaining kwargs to
        # the QdrantClient constructor, which would reject them. "host" may
        # have come from the QDRANT_HOST environment variable rather than
        # kwargs, hence the pop default.
        kwargs.pop("host", None)
        collection_name = kwargs.pop("collection_name", uuid.uuid4().hex)
        distance_func = kwargs.pop("distance_func", "Cosine").upper()
        client = qdrant_client.QdrantClient(host=qdrant_host, **kwargs)

        client.recreate_collection(
            collection_name=collection_name,
            vectors_config=rest.VectorParams(
                size=vector_size,
                distance=rest.Distance[distance_func],
            ),
        )

        client.upsert(
            collection_name=collection_name,
            points=rest.Batch(
                ids=[uuid.uuid4().hex for _ in texts],
                vectors=embeddings,
                payloads=cls._build_payloads(texts, metadatas),
            ),
        )

        return cls(client, collection_name, embedding.embed_query)

    @classmethod
    def _build_payloads(
        cls, texts: Iterable[str], metadatas: Optional[List[dict]]
    ) -> List[dict]:
        """Pair each text with its metadata into a Qdrant point payload dict."""
        return [
            {
                "page_content": text,
                "metadata": metadatas[i] if metadatas is not None else None,
            }
            for i, text in enumerate(texts)
        ]

    @classmethod
    def _document_from_scored_point(cls, scored_point: Any) -> Document:
        """Rebuild a Document from the payload stored on a search hit."""
        return Document(
            page_content=scored_point.payload.get("page_content"),
            metadata=scored_point.payload.get("metadata") or {},
        )
|
1043
poetry.lock
generated
1043
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@ -30,6 +30,7 @@ pinecone-client = {version = "^2", optional = true}
|
||||
weaviate-client = {version = "^3", optional = true}
|
||||
google-api-python-client = {version = "2.70.0", optional = true}
|
||||
wolframalpha = {version = "5.0.0", optional = true}
|
||||
qdrant-client = {version = "^0.11.7", optional = true}
|
||||
|
||||
[tool.poetry.group.docs.dependencies]
|
||||
autodoc_pydantic = "^1.8.0"
|
||||
@ -74,7 +75,7 @@ playwright = "^1.28.0"
|
||||
|
||||
[tool.poetry.extras]
|
||||
llms = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "torch", "transformers"]
|
||||
all = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "elasticsearch", "google-search-results", "faiss-cpu", "sentence_transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch", "jinja2", "pinecone-client", "weaviate-client", "redis", "google-api-python-client", "wolframalpha"]
|
||||
all = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "elasticsearch", "google-search-results", "faiss-cpu", "sentence_transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch", "jinja2", "pinecone-client", "weaviate-client", "redis", "google-api-python-client", "wolframalpha", "qdrant-client"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
57
tests/integration_tests/vectorstores/test_qdrant.py
Normal file
57
tests/integration_tests/vectorstores/test_qdrant.py
Normal file
@ -0,0 +1,57 @@
|
||||
"""Test Qdrant functionality."""
|
||||
from typing import List
|
||||
|
||||
from langchain.docstore.document import Document
|
||||
from langchain.embeddings.base import Embeddings
|
||||
from langchain.vectorstores import Qdrant
|
||||
|
||||
|
||||
class FakeEmbeddings(Embeddings):
    """Deterministic stand-in embeddings used by the integration tests."""

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Return simple embeddings: nine 1.0s followed by the text's index."""
        vectors = []
        for index in range(len(texts)):
            vectors.append([1.0] * 9 + [float(index)])
        return vectors

    def embed_query(self, text: str) -> List[float]:
        """Return simple embeddings: nine 1.0s followed by 0.0."""
        return [1.0] * 9 + [0.0]
|
||||
|
||||
|
||||
def test_qdrant() -> None:
    """Test end to end construction and search."""
    corpus = ["foo", "bar", "baz"]
    store = Qdrant.from_texts(corpus, FakeEmbeddings(), host="localhost")
    hits = store.similarity_search("foo", k=1)
    assert hits == [Document(page_content="foo")]
|
||||
|
||||
|
||||
def test_qdrant_with_metadatas() -> None:
    """Test end to end construction and search."""
    corpus = ["foo", "bar", "baz"]
    page_info = [{"page": index} for index, _ in enumerate(corpus)]
    store = Qdrant.from_texts(
        corpus,
        FakeEmbeddings(),
        metadatas=page_info,
        host="localhost",
    )
    hits = store.similarity_search("foo", k=1)
    assert hits == [Document(page_content="foo", metadata={"page": 0})]
|
||||
|
||||
|
||||
def test_qdrant_max_marginal_relevance_search() -> None:
    """Test end to end construction and MRR search."""
    corpus = ["foo", "bar", "baz"]
    page_info = [{"page": index} for index, _ in enumerate(corpus)]
    store = Qdrant.from_texts(
        corpus,
        FakeEmbeddings(),
        metadatas=page_info,
        host="localhost",
    )
    hits = store.max_marginal_relevance_search("foo", k=2, fetch_k=3)
    expected = [
        Document(page_content="foo", metadata={"page": 0}),
        Document(page_content="bar", metadata={"page": 1}),
    ]
    assert hits == expected
|
Loading…
Reference in New Issue
Block a user