partners[lint]: run pyupgrade to get code in line with 3.9 standards (#30781)
Using `pyupgrade` to get all `partners` code up to 3.9 standards (mostly, fixing old `typing` imports).
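
For readers who haven't run it, `pyupgrade` mechanically rewrites syntax that newer Python versions no longer need; targeting 3.9 mostly means PEP 585 builtin generics (`list[...]`, `dict[...]`, `tuple[...]`) and `collections.abc` imports instead of the deprecated `typing` aliases, while `Optional` and `Union` remain imported from `typing`, as the diff below shows. The snippet that follows is an illustrative sketch, not code from this commit: the function name is made up, and the exact lint invocation isn't shown on this page (a typical standalone run would be something like `pyupgrade --py39-plus <files>`).

    # Pre-3.9 style: generic containers and ABCs all come from typing.
    #
    #     from typing import Dict, Iterable, List, Optional, Tuple
    #
    #     def score_tags(tags: Iterable[str]) -> List[Tuple[str, float]]:
    #         ...
    #
    # 3.9 style, the direction pyupgrade rewrites toward: builtins are
    # subscriptable (PEP 585) and the ABCs live in collections.abc, while
    # Optional still comes from typing.
    from collections.abc import Iterable
    from typing import Optional

    def score_tags(tags: Iterable[str], default: Optional[float] = None) -> list[tuple[str, float]]:
        """Hypothetical helper used only to illustrate the annotation style."""
        return [(tag, default if default is not None else float(len(tag))) for tag in tags]
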
@@ -8,17 +8,12 @@ from __future__ import annotations
 import base64
 import logging
 import uuid
+from collections.abc import Iterable, Sequence
 from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
-    Dict,
-    Iterable,
-    List,
     Optional,
-    Sequence,
-    Tuple,
-    Type,
     Union,
 )
 
@@ -37,11 +32,11 @@ logger = logging.getLogger()
 DEFAULT_K = 4  # Number of Documents to return.
 
 
-def _results_to_docs(results: Any) -> List[Document]:
+def _results_to_docs(results: Any) -> list[Document]:
     return [doc for doc, _ in _results_to_docs_and_scores(results)]
 
 
-def _results_to_docs_and_scores(results: Any) -> List[Tuple[Document, float]]:
+def _results_to_docs_and_scores(results: Any) -> list[tuple[Document, float]]:
     return [
         # TODO: Chroma can do batch querying,
         # we shouldn't hard code to the 1st result
@@ -58,7 +53,7 @@ def _results_to_docs_and_scores(results: Any) -> List[Tuple[Document, float]]:
     ]
 
 
-def _results_to_docs_and_vectors(results: Any) -> List[Tuple[Document, np.ndarray]]:
+def _results_to_docs_and_vectors(results: Any) -> list[tuple[Document, np.ndarray]]:
     return [
         (Document(page_content=result[0], metadata=result[1] or {}), result[2])
         for result in zip(
@@ -69,7 +64,7 @@ def _results_to_docs_and_vectors(results: Any) -> List[Tuple[Document, np.ndarra
     ]
 
 
-Matrix = Union[List[List[float]], List[np.ndarray], np.ndarray]
+Matrix = Union[list[list[float]], list[np.ndarray], np.ndarray]
 
 
 def cosine_similarity(X: Matrix, Y: Matrix) -> np.ndarray:
@@ -104,7 +99,7 @@ def maximal_marginal_relevance(
     embedding_list: list,
     lambda_mult: float = 0.5,
     k: int = 4,
-) -> List[int]:
+) -> list[int]:
     """Calculate maximal marginal relevance.
 
     Args:
@@ -287,7 +282,7 @@ class Chroma(VectorStore):
         embedding_function: Optional[Embeddings] = None,
         persist_directory: Optional[str] = None,
         client_settings: Optional[chromadb.config.Settings] = None,
-        collection_metadata: Optional[Dict] = None,
+        collection_metadata: Optional[dict] = None,
         client: Optional[chromadb.ClientAPI] = None,
         relevance_score_fn: Optional[Callable[[float], float]] = None,
         create_collection_if_not_exists: Optional[bool] = True,
@@ -370,13 +365,13 @@ class Chroma(VectorStore):
     @xor_args(("query_texts", "query_embeddings"))
     def __query_collection(
         self,
-        query_texts: Optional[List[str]] = None,
-        query_embeddings: Optional[List[List[float]]] = None,
+        query_texts: Optional[list[str]] = None,
+        query_embeddings: Optional[list[list[float]]] = None,
         n_results: int = 4,
-        where: Optional[Dict[str, str]] = None,
-        where_document: Optional[Dict[str, str]] = None,
+        where: Optional[dict[str, str]] = None,
+        where_document: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> Union[List[Document], chromadb.QueryResult]:
+    ) -> Union[list[Document], chromadb.QueryResult]:
         """Query the chroma collection.
 
         Args:
@@ -411,11 +406,11 @@ class Chroma(VectorStore):
 
     def add_images(
         self,
-        uris: List[str],
-        metadatas: Optional[List[dict]] = None,
-        ids: Optional[List[str]] = None,
+        uris: list[str],
+        metadatas: Optional[list[dict]] = None,
+        ids: Optional[list[str]] = None,
         **kwargs: Any,
-    ) -> List[str]:
+    ) -> list[str]:
         """Run more images through the embeddings and add to the vectorstore.
 
         Args:
@@ -502,10 +497,10 @@ class Chroma(VectorStore):
     def add_texts(
         self,
         texts: Iterable[str],
-        metadatas: Optional[List[dict]] = None,
-        ids: Optional[List[str]] = None,
+        metadatas: Optional[list[dict]] = None,
+        ids: Optional[list[str]] = None,
         **kwargs: Any,
-    ) -> List[str]:
+    ) -> list[str]:
         """Run more texts through the embeddings and add to the vectorstore.
 
         Args:
@@ -591,9 +586,9 @@ class Chroma(VectorStore):
         self,
         query: str,
         k: int = DEFAULT_K,
-        filter: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Run similarity search with Chroma.
 
         Args:
@@ -612,12 +607,12 @@ class Chroma(VectorStore):
 
     def similarity_search_by_vector(
         self,
-        embedding: List[float],
+        embedding: list[float],
         k: int = DEFAULT_K,
-        filter: Optional[Dict[str, str]] = None,
-        where_document: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
+        where_document: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Return docs most similar to embedding vector.
 
         Args:
@@ -642,12 +637,12 @@ class Chroma(VectorStore):
 
     def similarity_search_by_vector_with_relevance_scores(
         self,
-        embedding: List[float],
+        embedding: list[float],
         k: int = DEFAULT_K,
-        filter: Optional[Dict[str, str]] = None,
-        where_document: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
+        where_document: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Tuple[Document, float]]:
+    ) -> list[tuple[Document, float]]:
         """Return docs most similar to embedding vector and similarity score.
 
         Args:
@@ -675,10 +670,10 @@ class Chroma(VectorStore):
         self,
         query: str,
         k: int = DEFAULT_K,
-        filter: Optional[Dict[str, str]] = None,
-        where_document: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
+        where_document: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Tuple[Document, float]]:
+    ) -> list[tuple[Document, float]]:
         """Run similarity search with Chroma with distance.
 
         Args:
@@ -717,10 +712,10 @@ class Chroma(VectorStore):
         self,
         query: str,
         k: int = DEFAULT_K,
-        filter: Optional[Dict[str, str]] = None,
-        where_document: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
+        where_document: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Tuple[Document, np.ndarray]]:
+    ) -> list[tuple[Document, np.ndarray]]:
         """Run similarity search with Chroma with vectors.
 
         Args:
@@ -800,9 +795,9 @@ class Chroma(VectorStore):
         self,
         uri: str,
         k: int = DEFAULT_K,
-        filter: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Search for similar images based on the given image URI.
 
         Args:
@@ -844,9 +839,9 @@ class Chroma(VectorStore):
         self,
         uri: str,
         k: int = DEFAULT_K,
-        filter: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Tuple[Document, float]]:
+    ) -> list[tuple[Document, float]]:
         """Search for similar images based on the given image URI.
 
         Args:
@@ -886,14 +881,14 @@ class Chroma(VectorStore):
 
     def max_marginal_relevance_search_by_vector(
         self,
-        embedding: List[float],
+        embedding: list[float],
         k: int = DEFAULT_K,
         fetch_k: int = 20,
         lambda_mult: float = 0.5,
-        filter: Optional[Dict[str, str]] = None,
-        where_document: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
+        where_document: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Return docs selected using the maximal marginal relevance.
 
         Maximal marginal relevance optimizes for similarity to query AND diversity
@@ -942,10 +937,10 @@ class Chroma(VectorStore):
         k: int = DEFAULT_K,
         fetch_k: int = 20,
         lambda_mult: float = 0.5,
-        filter: Optional[Dict[str, str]] = None,
-        where_document: Optional[Dict[str, str]] = None,
+        filter: Optional[dict[str, str]] = None,
+        where_document: Optional[dict[str, str]] = None,
         **kwargs: Any,
-    ) -> List[Document]:
+    ) -> list[Document]:
         """Return docs selected using the maximal marginal relevance.
 
         Maximal marginal relevance optimizes for similarity to query AND diversity
@@ -1005,8 +1000,8 @@ class Chroma(VectorStore):
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         where_document: Optional[WhereDocument] = None,
-        include: Optional[List[str]] = None,
-    ) -> Dict[str, Any]:
+        include: Optional[list[str]] = None,
+    ) -> dict[str, Any]:
         """Gets the collection.
 
         Args:
@@ -1081,7 +1076,7 @@ class Chroma(VectorStore):
         return self.update_documents([document_id], [document])
 
     # type: ignore
-    def update_documents(self, ids: List[str], documents: List[Document]) -> None:
+    def update_documents(self, ids: list[str], documents: list[Document]) -> None:
         """Update a document in the collection.
 
         Args:
@@ -1129,16 +1124,16 @@ class Chroma(VectorStore):
 
     @classmethod
     def from_texts(
-        cls: Type[Chroma],
-        texts: List[str],
+        cls: type[Chroma],
+        texts: list[str],
         embedding: Optional[Embeddings] = None,
-        metadatas: Optional[List[dict]] = None,
-        ids: Optional[List[str]] = None,
+        metadatas: Optional[list[dict]] = None,
+        ids: Optional[list[str]] = None,
         collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME,
         persist_directory: Optional[str] = None,
         client_settings: Optional[chromadb.config.Settings] = None,
         client: Optional[chromadb.ClientAPI] = None,
-        collection_metadata: Optional[Dict] = None,
+        collection_metadata: Optional[dict] = None,
         **kwargs: Any,
     ) -> Chroma:
         """Create a Chroma vectorstore from a raw documents.
@@ -1200,15 +1195,15 @@ class Chroma(VectorStore):
 
     @classmethod
    def from_documents(
-        cls: Type[Chroma],
-        documents: List[Document],
+        cls: type[Chroma],
+        documents: list[Document],
         embedding: Optional[Embeddings] = None,
-        ids: Optional[List[str]] = None,
+        ids: Optional[list[str]] = None,
         collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME,
         persist_directory: Optional[str] = None,
         client_settings: Optional[chromadb.config.Settings] = None,
         client: Optional[chromadb.ClientAPI] = None,  # Add this line
-        collection_metadata: Optional[Dict] = None,
+        collection_metadata: Optional[dict] = None,
         **kwargs: Any,
     ) -> Chroma:
         """Create a Chroma vectorstore from a list of documents.
@@ -1249,7 +1244,7 @@ class Chroma(VectorStore):
             **kwargs,
         )
 
-    def delete(self, ids: Optional[List[str]] = None, **kwargs: Any) -> None:
+    def delete(self, ids: Optional[list[str]] = None, **kwargs: Any) -> None:
         """Delete by vector IDs.
 
         Args:
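
The edits above touch annotations only; on Python 3.9+ the runtime behavior is unchanged. As a quick orientation to the public surface whose signatures appear in the diff (`Chroma.from_texts`, `similarity_search`, `max_marginal_relevance_search_by_vector`), here is a hedged usage sketch. It assumes `langchain-chroma` and `chromadb` are installed and that the default client is fine for a throwaway collection; `TinyEmbeddings` is a made-up stand-in for a real embedding model, not part of the library.

    from langchain_chroma import Chroma  # import path assumed for this partner package
    from langchain_core.embeddings import Embeddings

    class TinyEmbeddings(Embeddings):
        """Made-up two-dimensional embedding model, used only for this sketch."""

        def embed_documents(self, texts: list[str]) -> list[list[float]]:
            # Text length plus a crude character sum stand in for a real model.
            return [[float(len(t)), float(sum(map(ord, t)) % 97)] for t in texts]

        def embed_query(self, text: str) -> list[float]:
            return self.embed_documents([text])[0]

    store = Chroma.from_texts(
        texts=["alpha", "beta", "gamma"],
        embedding=TinyEmbeddings(),
        metadatas=[{"source": "demo"}] * 3,
        collection_name="pyupgrade-demo",
    )

    hits = store.similarity_search("alpha", k=2, filter={"source": "demo"})  # list[Document]
    mmr = store.max_marginal_relevance_search_by_vector(
        embedding=TinyEmbeddings().embed_query("alpha"),
        k=2,
        fetch_k=3,
    )  # list[Document], reordered for diversity

Note that the annotations on `embed_documents` use the same 3.9 builtin generics this commit standardizes on.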