diff --git a/docs/extras/modules/data_connection/caching_embeddings.ipynb b/docs/extras/modules/data_connection/caching_embeddings.ipynb index de549ea88b7..bf87f9e0523 100644 --- a/docs/extras/modules/data_connection/caching_embeddings.ipynb +++ b/docs/extras/modules/data_connection/caching_embeddings.ipynb @@ -9,7 +9,7 @@ "\n", "Embeddings can be stored or temporarily cached to avoid needing to recompute them.\n", "\n", - "Caching embeddings can be done using a `CacheBackedEmbedder`.\n", + "Caching embeddings can be done using a `CacheBackedEmbeddings`.\n", "\n", "The cache backed embedder is a wrapper around an embedder that caches\n", "embeddings in a key-value store. \n", @@ -17,7 +17,7 @@ "The text is hashed and the hash is used as the key in the cache.\n", "\n", "\n", - "The main supported way to initialized a `CacheBackedEmbedder` is `from_bytes_store`. This takes in the following parameters:\n", + "The main supported way to initialize a `CacheBackedEmbeddings` is `from_bytes_store`. 
This takes in the following parameters:\n", "\n", "- underlying_embedder: The embedder to use for embedding.\n", "- document_embedding_cache: The cache to use for storing document embeddings.\n", @@ -28,17 +28,200 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 4, "id": "a463c3c2-749b-40d1-a433-84f68a1cd1c7", "metadata": { "tags": [] }, "outputs": [], "source": [ - "from langchain.embeddings import CacheBackedEmbedder\n", - "from langchain.storage import InMemoryStore\n", - "from langchain.storage import LocalFileStore\n", - "from langchain.embeddings import OpenAIEmbeddings" + "from langchain.storage import InMemoryStore, LocalFileStore, RedisStore\n", + "from langchain.embeddings import OpenAIEmbeddings, CacheBackedEmbeddings" + ] + }, + { + "cell_type": "markdown", + "id": "9ddf07dd-3e72-41de-99d4-78e9521e272f", + "metadata": {}, + "source": [ + "## Using with a vectorstore\n", + "\n", + "First, let's see an example that uses the local file system for storing embeddings and uses FAISS vectorstore for retrieval." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "9e4314d8-88ef-4f52-81ae-0be771168bb6", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.document_loaders import TextLoader\n", + "from langchain.embeddings.openai import OpenAIEmbeddings\n", + "from langchain.text_splitter import CharacterTextSplitter\n", + "from langchain.vectorstores import FAISS" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "3e751f26-9b5b-4c10-843a-d784b5ea8538", + "metadata": {}, + "outputs": [], + "source": [ + "underlying_embeddings = OpenAIEmbeddings()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "30743664-38f5-425d-8216-772b64e7f348", + "metadata": {}, + "outputs": [], + "source": [ + "fs = LocalFileStore(\"./cache/\")\n", + "\n", + "cached_embedder = CacheBackedEmbeddings.from_bytes_store(\n", + " underlying_embeddings, fs, namespace=underlying_embeddings.model\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "f8cdf33c-321d-4d2c-b76b-d6f5f8b42a92", + "metadata": {}, + "source": [ + "The cache is empty prior to embedding" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "f9ad627f-ced2-4277-b336-2434f22f2c8a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(fs.yield_keys())" + ] + }, + { + "cell_type": "markdown", + "id": "a4effe04-b40f-42f8-a449-72fe6991cf20", + "metadata": {}, + "source": [ + "Load the document, split it into chunks, embed each chunk and load it into the vector store." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "cf958ac2-e60e-4668-b32c-8bb2d78b3c61", + "metadata": {}, + "outputs": [], + "source": [ + "raw_documents = TextLoader(\"../state_of_the_union.txt\").load()\n", + "text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)\n", + "documents = text_splitter.split_documents(raw_documents)" + ] + }, + { + "cell_type": "markdown", + "id": "f526444b-93f8-423f-b6d1-dab539450921", + "metadata": {}, + "source": [ + "create the vectorstore" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "3a1d7bb8-3b72-4bb5-9013-cf7729caca61", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 608 ms, sys: 58.9 ms, total: 667 ms\n", + "Wall time: 1.3 s\n" + ] + } + ], + "source": [ + "%%time\n", + "db = FAISS.from_documents(documents, cached_embedder)" + ] + }, + { + "cell_type": "markdown", + "id": "64fc53f5-d559-467f-bf62-5daef32ffbc0", + "metadata": {}, + "source": [ + "If we try to create the vectorstore again, it'll be much faster since it does not need to re-compute any embeddings." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "714cb2e2-77ba-41a8-bb83-84e75342af2d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 33.6 ms, sys: 3.96 ms, total: 37.6 ms\n", + "Wall time: 36.8 ms\n" + ] + } + ], + "source": [ + "%%time\n", + "db2 = FAISS.from_documents(documents, cached_embedder)" + ] + }, + { + "cell_type": "markdown", + "id": "1acc76b9-9c70-4160-b593-5f932c75e2b6", + "metadata": {}, + "source": [ + "And here are some of the embeddings that got created:" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "f2ca32dd-3712-4093-942b-4122f3dc8a8e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['text-embedding-ada-002614d7cf6-46f1-52fa-9d3a-740c39e7a20e',\n", + " 'text-embedding-ada-0020fc1ede2-407a-5e14-8f8f-5642214263f5',\n", + " 'text-embedding-ada-002e4ad20ef-dfaa-5916-9459-f90c6d8e8159',\n", + " 'text-embedding-ada-002a5ef11e4-0474-5725-8d80-81c91943b37f',\n", + " 'text-embedding-ada-00281426526-23fe-58be-9e84-6c7c72c8ca9a']" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(fs.yield_keys())[:5]" ] }, { @@ -54,7 +237,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 14, "id": "13bd1c5b-b7ba-4394-957c-7d5b5a841972", "metadata": { "tags": [] @@ -66,22 +249,22 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 15, "id": "9d99885f-99e1-498c-904d-6db539ac9466", "metadata": { "tags": [] }, "outputs": [], "source": [ - "underlying_embedder = OpenAIEmbeddings()\n", - "embedder = CacheBackedEmbedder.from_bytes_store(\n", - " underlying_embedder, store, namespace=underlying_embedder.model\n", + "underlying_embeddings = OpenAIEmbeddings()\n", + "embedder = CacheBackedEmbeddings.from_bytes_store(\n", + " underlying_embeddings, store, namespace=underlying_embeddings.model\n", ")" ] }, { "cell_type": "code", - 
"execution_count": 4, + "execution_count": 16, "id": "682eb5d4-0b7a-4dac-b8fb-3de4ca6e421c", "metadata": { "tags": [] @@ -91,8 +274,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 405 ms, sys: 32.9 ms, total: 438 ms\n", - "Wall time: 715 ms\n" + "CPU times: user 10.9 ms, sys: 916 µs, total: 11.8 ms\n", + "Wall time: 159 ms\n" ] } ], @@ -111,7 +294,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 17, "id": "f819c3ff-a212-4d06-a5f7-5eb1435c1feb", "metadata": { "tags": [] @@ -121,8 +304,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 1.55 ms, sys: 436 µs, total: 1.99 ms\n", - "Wall time: 1.99 ms\n" + "CPU times: user 1.67 ms, sys: 342 µs, total: 2.01 ms\n", + "Wall time: 2.01 ms\n" ] } ], @@ -133,7 +316,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 18, "id": "ec38fb72-90a9-4687-a483-c62c87d1f4dd", "metadata": { "tags": [] @@ -145,7 +328,7 @@ "True" ] }, - "execution_count": 6, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -161,38 +344,38 @@ "source": [ "## File system\n", "\n", - "This section covers how to use a file system store" + "This section covers how to use a file system store." 
] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 19, "id": "a0070271-0809-4528-97e0-2a88216846f3", "metadata": { "tags": [] }, "outputs": [], "source": [ - "fs = LocalFileStore(\"./cache/\")" + "fs = LocalFileStore(\"./test_cache/\")" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 20, "id": "0b20e9fe-f57f-4d7c-9f81-105c5f8726f4", "metadata": { "tags": [] }, "outputs": [], "source": [ - "embedder2 = CacheBackedEmbedder.from_bytes_store(\n", - " underlying_embedder, fs, namespace=underlying_embedder.model\n", + "embedder2 = CacheBackedEmbeddings.from_bytes_store(\n", + " underlying_embeddings, fs, namespace=underlying_embeddings.model\n", ")" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 21, "id": "630515fd-bf5c-4d9c-a404-9705308f3a2c", "metadata": { "tags": [] @@ -202,8 +385,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 10.5 ms, sys: 988 µs, total: 11.5 ms\n", - "Wall time: 220 ms\n" + "CPU times: user 6.89 ms, sys: 4.89 ms, total: 11.8 ms\n", + "Wall time: 184 ms\n" ] } ], @@ -214,7 +397,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 22, "id": "30e6bb87-42c9-4d08-88ac-0d22c9c449a1", "metadata": { "tags": [] @@ -224,8 +407,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "CPU times: user 3.49 ms, sys: 0 ns, total: 3.49 ms\n", - "Wall time: 3.03 ms\n" + "CPU times: user 0 ns, sys: 3.24 ms, total: 3.24 ms\n", + "Wall time: 2.84 ms\n" ] } ], @@ -239,14 +422,14 @@ "id": "12ed5a45-8352-4e0f-8583-5537397f53c0", "metadata": {}, "source": [ - "Here are the embeddings that have been persisted to the directory `./cache`. \n", + "Here are the embeddings that have been persisted to the directory `./test_cache`. \n", "\n", "Notice that the embedder takes a namespace parameter." 
] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 23, "id": "658e2914-05e9-44a3-a8fe-3fe17ca84039", "metadata": {}, "outputs": [ @@ -257,7 +440,7 @@ " 'text-embedding-ada-0026ba52e44-59c9-5cc9-a084-284061b13c80']" ] }, - "execution_count": 11, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } @@ -268,53 +451,84 @@ }, { "cell_type": "markdown", - "id": "c67f8e97-4851-4e26-ab6f-3418b0188dc4", + "id": "cd5f5a96-6ffa-429d-aa82-00b3f6532871", "metadata": {}, "source": [ - "## Using with a vectorstore\n", - "\n", - "Let's see this cache in action with the FAISS vectorstore." + "## Redis Store\n", + "\n" ] }, { "cell_type": "code", - "execution_count": 12, - "id": "9e4314d8-88ef-4f52-81ae-0be771168bb6", + "execution_count": 24, + "id": "4879c134-141f-48a0-acfe-7d6f30253af0", "metadata": {}, "outputs": [], "source": [ - "from langchain.document_loaders import TextLoader\n", - "from langchain.embeddings.openai import OpenAIEmbeddings\n", - "from langchain.text_splitter import CharacterTextSplitter\n", - "from langchain.vectorstores import FAISS" + "from langchain.storage import RedisStore" ] }, { "cell_type": "code", - "execution_count": 13, - "id": "30743664-38f5-425d-8216-772b64e7f348", + "execution_count": 25, + "id": "8b2bb9a0-6549-4487-8532-29ab4ab7336f", "metadata": {}, "outputs": [], "source": [ - "fs = LocalFileStore(\"./cache/\")\n", + "# For cache isolation can use a separate DB\n", + "# Or additional namespace\n", + "store = RedisStore(redis_url=\"redis://localhost:6379\", client_kwargs={'db': 2}, namespace='embedding_caches')\n", "\n", - "cached_embedder = CacheBackedEmbedder.from_bytes_store(\n", - " OpenAIEmbeddings(), fs, namespace=underlying_embedder.model\n", + "underlying_embeddings = OpenAIEmbeddings()\n", + "embedder = CacheBackedEmbeddings.from_bytes_store(\n", + " underlying_embeddings, store, namespace=underlying_embeddings.model\n", ")" ] }, { - "cell_type": "markdown", - "id": 
"06a6f305-724f-4b71-adef-be0169f61381", + "cell_type": "code", + "execution_count": 26, + "id": "eca3cb99-2bb3-49d5-81f9-1dee03da4b8c", "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 3.99 ms, sys: 0 ns, total: 3.99 ms\n", + "Wall time: 3.5 ms\n" + ] + } + ], "source": [ - "The cache is empty prior to embedding" + "%%time\n", + "embeddings = embedder.embed_documents([\"hello\", \"goodbye\"])" ] }, { "cell_type": "code", - "execution_count": 14, - "id": "f9ad627f-ced2-4277-b336-2434f22f2c8a", + "execution_count": 27, + "id": "317ba5d8-89f9-462c-b807-ad4ef26e518b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2.47 ms, sys: 767 µs, total: 3.24 ms\n", + "Wall time: 2.75 ms\n" + ] + } + ], + "source": [ + "%%time\n", + "embeddings = embedder.embed_documents([\"hello\", \"goodbye\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "8a540317-5142-4491-9062-a097932b56e3", "metadata": {}, "outputs": [ { @@ -324,122 +538,35 @@ " 'text-embedding-ada-0026ba52e44-59c9-5cc9-a084-284061b13c80']" ] }, - "execution_count": 14, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "list(fs.yield_keys())" - ] - }, - { - "cell_type": "markdown", - "id": "5814aa9c-e8e4-4079-accf-53c49615971e", - "metadata": {}, - "source": [ - "Load the document, split it into chunks, embed each chunk and load it into the vector store." 
- ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "cf958ac2-e60e-4668-b32c-8bb2d78b3c61", - "metadata": {}, - "outputs": [], - "source": [ - "raw_documents = TextLoader(\"../state_of_the_union.txt\").load()\n", - "text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)\n", - "documents = text_splitter.split_documents(raw_documents)" - ] - }, - { - "cell_type": "markdown", - "id": "fc433fec-ab64-4f11-ae8b-fc3dd76cd79a", - "metadata": {}, - "source": [ - "create the vectorstore" + "list(store.yield_keys())" ] }, { "cell_type": "code", "execution_count": 17, - "id": "3a1d7bb8-3b72-4bb5-9013-cf7729caca61", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 124 ms, sys: 22.6 ms, total: 146 ms\n", - "Wall time: 832 ms\n" - ] - } - ], - "source": [ - "%%time\n", - "db = FAISS.from_documents(documents, cached_embedder)" - ] - }, - { - "cell_type": "markdown", - "id": "c94a734c-fa66-40ce-8610-12b00b7df334", - "metadata": {}, - "source": [ - "If we try to create the vectostore again, it'll be much faster since it does not need to re-compute any embeddings." 
- ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "714cb2e2-77ba-41a8-bb83-84e75342af2d", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 32.9 ms, sys: 286 µs, total: 33.2 ms\n", - "Wall time: 32.5 ms\n" - ] - } - ], - "source": [ - "%%time\n", - "db2 = FAISS.from_documents(documents, cached_embedder)" - ] - }, - { - "cell_type": "markdown", - "id": "93d37b2a-5406-4e2c-b786-869e2430d19d", - "metadata": {}, - "source": [ - "And here are some of the embeddings that got created:" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "f2ca32dd-3712-4093-942b-4122f3dc8a8e", + "id": "cd9b0d4a-f816-4dce-9dde-cde1ad9a65fb", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['text-embedding-ada-002614d7cf6-46f1-52fa-9d3a-740c39e7a20e',\n", - " 'text-embedding-ada-0020fc1ede2-407a-5e14-8f8f-5642214263f5',\n", - " 'text-embedding-ada-002e885db5b-c0bd-5fbc-88b1-4d1da6020aa5',\n", - " 'text-embedding-ada-002e4ad20ef-dfaa-5916-9459-f90c6d8e8159',\n", - " 'text-embedding-ada-002a5ef11e4-0474-5725-8d80-81c91943b37f']" + "[b'embedding_caches/text-embedding-ada-002e885db5b-c0bd-5fbc-88b1-4d1da6020aa5',\n", + " b'embedding_caches/text-embedding-ada-0026ba52e44-59c9-5cc9-a084-284061b13c80']" ] }, - "execution_count": 19, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "list(fs.yield_keys())[:5]" + "list(store.client.scan_iter())" ] } ], diff --git a/libs/langchain/langchain/storage/__init__.py b/libs/langchain/langchain/storage/__init__.py index 6b0511aba33..ecc2e817f24 100644 --- a/libs/langchain/langchain/storage/__init__.py +++ b/libs/langchain/langchain/storage/__init__.py @@ -9,9 +9,11 @@ The primary goal of these storages is to support implementation of caching. 
from langchain.storage.encoder_backed import EncoderBackedStore from langchain.storage.file_system import LocalFileStore from langchain.storage.in_memory import InMemoryStore +from langchain.storage.redis import RedisStore __all__ = [ "EncoderBackedStore", - "LocalFileStore", "InMemoryStore", + "LocalFileStore", + "RedisStore", ] diff --git a/libs/langchain/langchain/storage/redis.py b/libs/langchain/langchain/storage/redis.py index 900144aa2c1..bfb3e4a4ae0 100644 --- a/libs/langchain/langchain/storage/redis.py +++ b/libs/langchain/langchain/storage/redis.py @@ -1,6 +1,7 @@ from typing import Any, Iterator, List, Optional, Sequence, Tuple, cast from langchain.schema import BaseStore +from langchain.utilities.redis import get_client class RedisStore(BaseStore[str, bytes]): @@ -13,7 +14,7 @@ class RedisStore(BaseStore[str, bytes]): # Instantiate the RedisStore with a Redis connection from langchain.storage import RedisStore - from langchain.vectorstores.redis import get_client + from langchain.utilities.redis import get_client client = get_client('redis://localhost:6379') redis_store = RedisStore(client) @@ -34,12 +35,22 @@ class RedisStore(BaseStore[str, bytes]): """ def __init__( - self, client: Any, *, ttl: Optional[int] = None, namespace: Optional[str] = None + self, + *, + client: Any = None, + redis_url: Optional[str] = None, + client_kwargs: Optional[dict] = None, + ttl: Optional[int] = None, + namespace: Optional[str] = None, ) -> None: """Initialize the RedisStore with a Redis connection. + Must provide either a Redis client or a redis_url with optional client_kwargs. 
+ Args: client: A Redis connection instance + redis_url: redis url + client_kwargs: Keyword arguments to pass to the Redis client ttl: time to expire keys in seconds if provided, if None keys will never expire namespace: if provided, all keys will be prefixed with this namespace @@ -52,19 +63,32 @@ class RedisStore(BaseStore[str, bytes]): "pip install redis" ) from e - if not isinstance(client, Redis): - raise TypeError( - f"Expected Redis client, got {type(client).__name__} instead." + if client and redis_url or client and client_kwargs: + raise ValueError( + "Either a Redis client or a redis_url with optional client_kwargs " + "must be provided, but not both." ) - self.client = client + if client: + if not isinstance(client, Redis): + raise TypeError( + f"Expected Redis client, got {type(client).__name__} instead." + ) + _client = client + else: + if not redis_url: + raise ValueError( + "Either a Redis client or a redis_url must be provided." + ) + _client = get_client(redis_url, **(client_kwargs or {})) + + self.client = _client if not isinstance(ttl, int) and ttl is not None: raise TypeError(f"Expected int or None, got {type(ttl)} instead.") self.ttl = ttl self.namespace = namespace - self.namespace_delimiter = "/" def _get_prefixed_key(self, key: str) -> str: """Get the key with the namespace prefix. @@ -75,8 +99,9 @@ class RedisStore(BaseStore[str, bytes]): Returns: str: The key with the namespace prefix. 
""" + delimiter = "/" if self.namespace: - return f"{self.namespace}{self.namespace_delimiter}{key}" + return f"{self.namespace}{delimiter}{key}" return key def mget(self, keys: Sequence[str]) -> List[Optional[bytes]]: diff --git a/libs/langchain/tests/integration_tests/storage/test_redis.py b/libs/langchain/tests/integration_tests/storage/test_redis.py index c5b4a69593e..109c8b1f5ea 100644 --- a/libs/langchain/tests/integration_tests/storage/test_redis.py +++ b/libs/langchain/tests/integration_tests/storage/test_redis.py @@ -41,7 +41,7 @@ def redis_client() -> Redis: def test_mget(redis_client: Redis) -> None: """Test mget method.""" - store = RedisStore(redis_client, ttl=None) + store = RedisStore(client=redis_client, ttl=None) keys = ["key1", "key2"] redis_client.mset({"key1": b"value1", "key2": b"value2"}) result = store.mget(keys) @@ -50,7 +50,7 @@ def test_mget(redis_client: Redis) -> None: def test_mset(redis_client: Redis) -> None: """Test that multiple keys can be set.""" - store = RedisStore(redis_client, ttl=None) + store = RedisStore(client=redis_client, ttl=None) key_value_pairs = [("key1", b"value1"), ("key2", b"value2")] store.mset(key_value_pairs) result = redis_client.mget(["key1", "key2"]) @@ -59,7 +59,7 @@ def test_mset(redis_client: Redis) -> None: def test_mdelete(redis_client: Redis) -> None: """Test that deletion works as expected.""" - store = RedisStore(redis_client, ttl=None) + store = RedisStore(client=redis_client, ttl=None) keys = ["key1", "key2"] redis_client.mset({"key1": b"value1", "key2": b"value2"}) store.mdelete(keys) @@ -68,7 +68,7 @@ def test_mdelete(redis_client: Redis) -> None: def test_yield_keys(redis_client: Redis) -> None: - store = RedisStore(redis_client, ttl=None) + store = RedisStore(client=redis_client, ttl=None) redis_client.mset({"key1": b"value1", "key2": b"value2"}) assert sorted(store.yield_keys()) == ["key1", "key2"] assert sorted(store.yield_keys(prefix="key*")) == ["key1", "key2"] @@ -77,7 +77,7 @@ def 
test_yield_keys(redis_client: Redis) -> None: def test_namespace(redis_client: Redis) -> None: """Test that a namespace is prepended to all keys properly.""" - store = RedisStore(redis_client, ttl=None, namespace="meow") + store = RedisStore(client=redis_client, ttl=None, namespace="meow") key_value_pairs = [("key1", b"value1"), ("key2", b"value2")] store.mset(key_value_pairs)