Merge branch 'master' into nc/20dec/runnable-chain

Bagatur
2023-12-21 10:14:02 -05:00
36 changed files with 7536 additions and 67 deletions

View File

@@ -26,7 +26,7 @@ class AzureChatOpenAI(ChatOpenAI):
     In addition, you should have the ``openai`` python package installed, and the
     following environment variables set or passed in constructor in lower case:
     - ``AZURE_OPENAI_API_KEY``
-    - ``AZURE_OPENAI_API_ENDPOINT``
+    - ``AZURE_OPENAI_ENDPOINT``
     - ``AZURE_OPENAI_AD_TOKEN``
     - ``OPENAI_API_VERSION``
     - ``OPENAI_PROXY``
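
For context, a minimal sketch of how these variables are typically supplied before constructing the client; the langchain_community.chat_models import path and the azure_deployment parameter are assumptions not shown in this hunk, and all values are placeholders:

import os

from langchain_community.chat_models import AzureChatOpenAI

# Placeholder credentials; AZURE_OPENAI_ENDPOINT replaces the old
# AZURE_OPENAI_API_ENDPOINT name listed in the docstring above.
os.environ["AZURE_OPENAI_API_KEY"] = "..."
os.environ["AZURE_OPENAI_ENDPOINT"] = "https://my-resource.openai.azure.com/"
os.environ["OPENAI_API_VERSION"] = "2023-05-15"

# `azure_deployment` is an assumed parameter name; use your own deployment name.
llm = AzureChatOpenAI(azure_deployment="my-deployment")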

View File

@@ -2,10 +2,7 @@ from __future__ import annotations
 import json
 import logging
-from typing import TYPE_CHECKING, Any, List, Optional, Tuple
-if TYPE_CHECKING:
-    from jaguardb_http_client.JaguarHttpClient import JaguarHttpClient
+from typing import Any, List, Optional, Tuple
 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
@@ -23,7 +20,7 @@ class Jaguar(VectorStore):
     Example:
         .. code-block:: python
-            from langchain.vectorstores import Jaguar
+            from langchain_community.vectorstores.jaguar import Jaguar
             vectorstore = Jaguar(
                 pod = 'vdb',
@@ -53,6 +50,13 @@ class Jaguar(VectorStore):
         self._vector_dimension = vector_dimension
         self._embedding = embedding
+        try:
+            from jaguardb_http_client.JaguarHttpClient import JaguarHttpClient
+        except ImportError:
+            raise ValueError(
+                "Could not import jaguardb-http-client python package. "
+                "Please install it with `pip install -U jaguardb-http-client`"
+            )
         self._jag = JaguarHttpClient(url)
         self._token = ""
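
Net effect of the Jaguar changes: the client import is deferred to __init__, so a missing jaguardb-http-client package now surfaces as the ValueError above when the store is constructed rather than when the module is imported, and the docstring points at the langchain_community import path. A rough usage sketch; only url, vector_dimension, and embedding appear in this diff, so the remaining constructor arguments and their values are assumptions:

from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores.jaguar import Jaguar

# Raises the ValueError above at this point if jaguardb-http-client is missing.
vectorstore = Jaguar(
    pod="vdb",                            # pod name, from the docstring example
    store="vdb.langchain_rag_store",      # assumed store name
    vector_index="v",                     # assumed index name
    vector_type="cosine_fraction_float",  # assumed vector type setting
    vector_dimension=1536,                # matches the OpenAI embedding size
    url="http://127.0.0.1:8080/fwww/",    # assumed local JaguarDB HTTP gateway
    embedding=OpenAIEmbeddings(),
)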

View File

@@ -306,8 +306,13 @@ class MomentoVectorIndex(VectorStore):
         if "top_k" in kwargs:
             k = kwargs["k"]
+        filter_expression = kwargs.get("filter_expression", None)
         response = self._client.search(
-            self.index_name, embedding, top_k=k, metadata_fields=ALL_METADATA
+            self.index_name,
+            embedding,
+            top_k=k,
+            metadata_fields=ALL_METADATA,
+            filter_expression=filter_expression,
         )
         if not isinstance(response, Search.Success):
@@ -366,8 +371,13 @@ class MomentoVectorIndex(VectorStore):
         from momento.requests.vector_index import ALL_METADATA
         from momento.responses.vector_index import SearchAndFetchVectors
+        filter_expression = kwargs.get("filter_expression", None)
         response = self._client.search_and_fetch_vectors(
-            self.index_name, embedding, top_k=fetch_k, metadata_fields=ALL_METADATA
+            self.index_name,
+            embedding,
+            top_k=fetch_k,
+            metadata_fields=ALL_METADATA,
+            filter_expression=filter_expression,
         )
         if isinstance(response, SearchAndFetchVectors.Success):
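
Both Momento search paths now forward an optional filter_expression from **kwargs to the client, defaulting to None so existing callers are unaffected. A caller-side sketch; vector_store stands for a MomentoVectorIndex instance and my_filter is a hypothetical filter expression built with the momento SDK, whose construction is not shown in this diff:

# Narrow the search with a metadata filter built via the momento SDK.
docs = vector_store.similarity_search(
    "what do sharks eat?",
    k=4,
    filter_expression=my_filter,  # hypothetical filter object
)

# Omitting the kwarg keeps the previous behavior (filter_expression=None).
docs = vector_store.similarity_search("what do sharks eat?", k=4)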

View File

@@ -1,10 +1,12 @@
 import os
 import time
 import uuid
-from typing import Iterator, List
+from typing import Generator, Iterator, List
 import pytest
 from langchain_core.documents import Document
+from langchain_community.document_loaders import TextLoader
 from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores import MomentoVectorIndex
@@ -24,6 +26,23 @@ def wait() -> None:
     time.sleep(1)
 @pytest.fixture(scope="module")
 def embedding_openai() -> OpenAIEmbeddings:
     if not os.environ.get("OPENAI_API_KEY"):
         raise ValueError("OPENAI_API_KEY is not set")
     return OpenAIEmbeddings()
+@pytest.fixture(scope="function")
+def texts() -> Generator[List[str], None, None]:
+    # Load the documents from a file located in the fixtures directory
+    documents = TextLoader(
+        os.path.join(os.path.dirname(__file__), "fixtures", "sharks.txt")
+    ).load()
+    yield [doc.page_content for doc in documents]
 @pytest.fixture(scope="function")
 def vector_store(
     embedding_openai: OpenAIEmbeddings, random_index_name: str
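
The new texts fixture yields the page contents of fixtures/sharks.txt, giving each test a small shared corpus. A hypothetical test sketch showing how it might combine with the existing vector_store fixture and wait() helper; the test name and assertion are illustrative only:

def test_add_and_search_texts(
    vector_store: MomentoVectorIndex, texts: List[str]
) -> None:
    # Index the shark snippets, give the index a moment to settle, then query.
    vector_store.add_texts(texts)
    wait()
    results = vector_store.similarity_search("sharks", k=1)
    assert len(results) == 1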

View File

@@ -10,7 +10,7 @@ def get_runtime_environment() -> dict:
     return {
         "library_version": __version__,
-        "library": "langchain",
+        "library": "langchain-core",
         "platform": platform.platform(),
         "runtime": "python",
         "runtime_version": platform.python_version(),