Mirror of https://github.com/hwchase17/langchain.git (synced 2025-04-27 19:46:55 +00:00)

Commit 01907e5418
@@ -713,7 +713,7 @@ Callback handlers can either be `sync` or `async`:
* Sync callback handlers implement the [BaseCallbackHandler](https://python.langchain.com/api_reference/core/callbacks/langchain_core.callbacks.base.BaseCallbackHandler.html) interface.
* Async callback handlers implement the [AsyncCallbackHandler](https://python.langchain.com/api_reference/core/callbacks/langchain_core.callbacks.base.AsyncCallbackHandler.html) interface.

-During run-time LangChain configures an appropriate callback manager (e.g., [CallbackManager](https://python.langchain.com/api_reference/core/callbacks/langchain_core.callbacks.manager.CallbackManager.html) or [AsyncCallbackManager](https://python.langchain.com/api_reference/core/callbacks/langchain_core.callbacks.manager.AsyncCallbackManager.html) which will be responsible for calling the appropriate method on each "registered" callback handler when the event is triggered.
+During run-time LangChain configures an appropriate callback manager (e.g., [CallbackManager](https://python.langchain.com/api_reference/core/callbacks/langchain_core.callbacks.manager.CallbackManager.html) or [AsyncCallbackManager](https://python.langchain.com/api_reference/core/callbacks/langchain_core.callbacks.manager.AsyncCallbackManager.html)) which will be responsible for calling the appropriate method on each "registered" callback handler when the event is triggered.

#### Passing callbacks
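For orientation, a minimal sketch of a custom sync handler wired in at run time (the handler name and the `llm.invoke` call are illustrative, not part of this commit):

```python
from typing import Any

from langchain_core.callbacks import BaseCallbackHandler


class PrintingHandler(BaseCallbackHandler):
    """Sync handler: the callback manager calls these hooks when events fire."""

    def on_llm_start(self, serialized: dict, prompts: list[str], **kwargs: Any) -> None:
        print(f"LLM started with {len(prompts)} prompt(s)")

    def on_llm_end(self, response: Any, **kwargs: Any) -> None:
        print("LLM finished")


# Registered at run time via the `callbacks` config key, e.g.:
# llm.invoke("Hello", config={"callbacks": [PrintingHandler()]})
```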
@@ -22,7 +22,7 @@
"2. LangChain [Runnables](/docs/concepts#runnable-interface);\n",
"3. By sub-classing from [BaseTool](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.BaseTool.html) -- This is the most flexible method, it provides the largest degree of control, at the expense of more effort and code.\n",
"\n",
-"Creating tools from functions may be sufficient for most use cases, and can be done via a simple [@tool decorator](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.tool.html#langchain_core.tools.tool). If more configuration is needed-- e.g., specification of both sync and async implementations-- one can also use the [StructuredTool.from_function](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.StructuredTool.html#langchain_core.tools.StructuredTool.from_function) class method.\n",
+"Creating tools from functions may be sufficient for most use cases, and can be done via a simple [@tool decorator](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.tool.html#langchain_core.tools.tool). If more configuration is needed-- e.g., specification of both sync and async implementations-- one can also use the [StructuredTool.from_function](https://python.langchain.com/api_reference/core/tools/langchain_core.tools.structured.StructuredTool.html#langchain_core.tools.structured.StructuredTool.from_function) class method.\n",
"\n",
"In this guide we provide an overview of these methods.\n",
"\n",
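A rough sketch of the first two creation routes named above; the `multiply` tool is a made-up example, not taken from the notebook:

```python
from langchain_core.tools import StructuredTool, tool


@tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b


def _multiply(a: int, b: int) -> int:
    return a * b


async def _amultiply(a: int, b: int) -> int:
    return a * b


# from_function accepts separate sync and async implementations.
calculator = StructuredTool.from_function(
    func=_multiply,
    coroutine=_amultiply,
    name="multiply",
    description="Multiply two integers.",
)
```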
@@ -19,7 +19,7 @@
"source": [
"# ChatSambaStudio\n",
"\n",
-"This will help you getting started with SambaNovaCloud [chat models](/docs/concepts/#chat-models). For detailed documentation of all ChatStudio features and configurations head to the [API reference](https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.sambanova.ChatSambaStudio.html).\n",
+"This will help you getting started with SambaStudio [chat models](/docs/concepts/#chat-models). For detailed documentation of all ChatStudio features and configurations head to the [API reference](https://api.python.langchain.com/en/latest/chat_models/langchain_community.chat_models.sambanova.ChatSambaStudio.html).\n",
"\n",
"**[SambaNova](https://sambanova.ai/)'s** [SambaStudio](https://docs.sambanova.ai/sambastudio/latest/sambastudio-intro.html) SambaStudio is a rich, GUI-based platform that provides the functionality to train, deploy, and manage models in SambaNova [DataScale](https://sambanova.ai/products/datascale) systems.\n",
"\n",
@ -26,33 +26,32 @@
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Setup\n",
|
||||
"\n",
|
||||
"### Credentials \n",
|
||||
"\n",
|
||||
"You will need to get your own API key. Go to [this page](https://firecrawl.dev) to learn more."
|
||||
"## Setup"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import getpass\n",
|
||||
"import os\n",
|
||||
"\n",
|
||||
"if \"FIRECRAWL_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"FIRECRAWL_API_KEY\"] = getpass.getpass(\"Enter your Firecrawl API key: \")"
|
||||
"pip install firecrawl-py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Installation\n",
|
||||
"\n",
|
||||
"You will need to install both the `langchain_community` and `firecrawl-py` pacakges:"
|
||||
"## Usage"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"You will need to get your own API key. See https://firecrawl.dev"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -61,42 +60,12 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install -qU firecrawl-py==0.0.20 langchain_community"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Initialization\n",
|
||||
"\n",
|
||||
"### Modes\n",
|
||||
"\n",
|
||||
"- `scrape`: Scrape single url and return the markdown.\n",
|
||||
"- `crawl`: Crawl the url and all accessible sub pages and return the markdown for each one."
|
||||
"from langchain_community.document_loaders.firecrawl import FireCrawlLoader"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_community.document_loaders import FireCrawlLoader\n",
|
||||
"\n",
|
||||
"loader = FireCrawlLoader(url=\"https://firecrawl.dev\", mode=\"crawl\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Load"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
@ -111,40 +80,14 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"docs = loader.load()\n",
|
||||
"\n",
|
||||
"docs[0]"
|
||||
"loader = FireCrawlLoader(\n",
|
||||
" api_key=\"YOUR_API_KEY\", url=\"https://firecrawl.dev\", mode=\"scrape\"\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{'ogUrl': 'https://www.firecrawl.dev/', 'title': 'Home - Firecrawl', 'robots': 'follow, index', 'ogImage': 'https://www.firecrawl.dev/og.png?123', 'ogTitle': 'Firecrawl', 'sitemap': {'lastmod': '2024-08-12T00:28:16.681Z', 'changefreq': 'weekly'}, 'keywords': 'Firecrawl,Markdown,Data,Mendable,Langchain', 'sourceURL': 'https://www.firecrawl.dev/', 'ogSiteName': 'Firecrawl', 'description': 'Firecrawl crawls and converts any website into clean markdown.', 'ogDescription': 'Turn any website into LLM-ready data.', 'pageStatusCode': 200, 'ogLocaleAlternate': []}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(docs[0].metadata)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Lazy Load\n",
|
||||
"\n",
|
||||
"You can use lazy loading to minimize memory requirements."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@ -160,39 +103,61 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"8"
|
||||
]
|
||||
},
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(pages)"
|
||||
"pages"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Modes\n",
|
||||
"\n",
|
||||
"- `scrape`: Scrape single url and return the markdown.\n",
|
||||
"- `crawl`: Crawl the url and all accessible sub pages and return the markdown for each one.\n",
|
||||
"- `map`: Maps the URL and returns a list of semantically related pages."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Crawl\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Introducing [Smart Crawl!](https://www.firecrawl.dev/smart-crawl)\n",
|
||||
" Join the waitlist to turn any web\n",
|
||||
"{'ogUrl': 'https://www.firecrawl.dev/blog/introducing-fire-engine-for-firecrawl', 'title': 'Introducing Fire Engine for Firecrawl', 'robots': 'follow, index', 'ogImage': 'https://www.firecrawl.dev/images/blog/fire-engine-launch.png', 'ogTitle': 'Introducing Fire Engine for Firecrawl', 'sitemap': {'lastmod': '2024-08-06T00:00:00.000Z', 'changefreq': 'weekly'}, 'keywords': 'firecrawl,fireengine,web crawling,dashboard,web scraping,LLM,data extraction', 'sourceURL': 'https://www.firecrawl.dev/blog/introducing-fire-engine-for-firecrawl', 'ogSiteName': 'Firecrawl', 'description': 'The most scalable, reliable, and fast way to get web data for Firecrawl.', 'ogDescription': 'The most scalable, reliable, and fast way to get web data for Firecrawl.', 'pageStatusCode': 200, 'ogLocaleAlternate': []}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"loader = FireCrawlLoader(\n",
|
||||
" api_key=\"YOUR_API_KEY\",\n",
|
||||
" url=\"https://firecrawl.dev\",\n",
|
||||
" mode=\"crawl\",\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"data = loader.load()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(pages[0].page_content[:100])\n",
|
||||
"print(pages[0].metadata)"
|
||||
@ -202,10 +167,54 @@
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Crawler Options\n",
|
||||
"#### Crawl Options\n",
|
||||
"\n",
|
||||
"You can also pass `params` to the loader. This is a dictionary of options to pass to the crawler. See the [FireCrawl API documentation](https://github.com/mendableai/firecrawl-py) for more information.\n",
|
||||
"\n"
|
||||
"You can also pass `params` to the loader. This is a dictionary of options to pass to the crawler. See the [FireCrawl API documentation](https://github.com/mendableai/firecrawl-py) for more information."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"source": [
|
||||
"### Map"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"loader = FireCrawlLoader(api_key=\"YOUR_API_KEY\", url=\"firecrawl.dev\", mode=\"map\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"docs = loader.load()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"docs"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"#### Map Options\n",
|
||||
"\n",
|
||||
"You can also pass `params` to the loader. This is a dictionary of options to pass to the loader. See the [FireCrawl API documentation](https://github.com/mendableai/firecrawl-py) for more information."
|
||||
]
|
||||
},
|
||||
{
|
||||
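Putting the loader pieces above together, a hedged end-to-end sketch (the API key is a placeholder and the `params` keys are illustrative; check the Firecrawl docs for the options your version accepts):

```python
from langchain_community.document_loaders import FireCrawlLoader

loader = FireCrawlLoader(
    api_key="YOUR_API_KEY",   # placeholder
    url="https://firecrawl.dev",
    mode="crawl",             # or "scrape" / "map"
    params={"limit": 5},      # illustrative crawler option
)
docs = loader.load()
print(len(docs), docs[0].metadata.get("sourceURL"))
```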
@@ -220,7 +229,7 @@
],
"metadata": {
"kernelspec": {
-"display_name": "langchain",
+"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -74,6 +74,24 @@
")"
]
},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"(Optional) To increase the retry time for getting a function execution response, set environment variable UC_TOOL_CLIENT_EXECUTION_TIMEOUT. Default retry time value is 120s."
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"import os\n",
+"\n",
+"os.environ[\"UC_TOOL_CLIENT_EXECUTION_TIMEOUT\"] = \"200\""
+]
+},
{
"cell_type": "code",
"execution_count": 4,
@@ -438,7 +438,7 @@
"app = workflow.compile(checkpointer=MemorySaver())\n",
"\n",
"# Async invocation:\n",
-"output = await app.ainvoke({\"messages\": input_messages}, config):\n",
+"output = await app.ainvoke({\"messages\": input_messages}, config)\n",
"output[\"messages\"][-1].pretty_print()\n",
"```\n",
"\n",
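For contrast with the corrected async call above, the synchronous form looks like this (`app`, `input_messages`, and `config` are the names used by the surrounding tutorial and are assumed here):

```python
# Sync invocation of the same compiled graph; `ainvoke` is its async counterpart.
output = app.invoke({"messages": input_messages}, config)
output["messages"][-1].pretty_print()
```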
@@ -26,13 +26,14 @@
"@docusaurus/preset-classic": "3.5.2",
"@docusaurus/remark-plugin-npm2yarn": "^3.5.2",
"@docusaurus/theme-mermaid": "3.5.2",
-"prism-react-renderer": "^2.1.0",
+"@giscus/react": "^3.0.0",
"@mdx-js/react": "^3",
"@supabase/supabase-js": "^2.39.7",
"clsx": "^1.2.1",
"cookie": "^0.6.0",
"isomorphic-dompurify": "^2.7.0",
"json-loader": "^0.5.7",
+"prism-react-renderer": "^2.1.0",
"process": "^0.11.10",
"react": "^18",
"react-dom": "^18",
@ -1,11 +1,25 @@
|
||||
import React from 'react';
|
||||
import Paginator from '@theme-original/DocItem/Paginator';
|
||||
import Feedback from "@theme/Feedback";
|
||||
import Giscus from "@giscus/react";
|
||||
|
||||
export default function PaginatorWrapper(props) {
|
||||
return (
|
||||
<>
|
||||
<Feedback />
|
||||
<Giscus
|
||||
repo="langchain-ai/langchain"
|
||||
repoId="R_kgDOIPDwlg"
|
||||
category="Docs Discussions"
|
||||
categoryId="DIC_kwDOIPDwls4CjJYb"
|
||||
mapping="pathname"
|
||||
strict="0"
|
||||
reactionsEnabled="0"
|
||||
emitMetadata="0"
|
||||
inputPosition="bottom"
|
||||
theme="preferred_color_scheme"
|
||||
lang="en"
|
||||
loading="lazy" />
|
||||
<Paginator {...props} />
|
||||
</>
|
||||
);
|
||||
|
@ -220,10 +220,6 @@ export default function Feedback() {
|
||||
onMouseUp: (e) => (e.currentTarget.style.backgroundColor = "#f0f0f0"),
|
||||
};
|
||||
|
||||
const newGithubIssueURL = pathname
|
||||
? `https://github.com/langchain-ai/langchain/issues/new?assignees=&labels=03+-+Documentation&projects=&template=documentation.yml&title=DOC%3A+%3CIssue+related+to+${pathname}%3E&url=https://python.langchain.com${pathname}`
|
||||
: "https://github.com/langchain-ai/langchain/issues/new?assignees=&labels=03+-+Documentation&projects=&template=documentation.yml&title=DOC%3A+%3CPlease+write+a+comprehensive+title+after+the+%27DOC%3A+%27+prefix%3E";
|
||||
|
||||
return (
|
||||
<div style={{ display: "flex", flexDirection: "column" }}>
|
||||
<hr />
|
||||
@ -296,14 +292,6 @@ export default function Feedback() {
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
<br />
|
||||
<h4>
|
||||
You can also leave detailed feedback{" "}
|
||||
<a target="_blank" href={newGithubIssueURL}>
|
||||
on GitHub
|
||||
</a>
|
||||
.
|
||||
</h4>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
@ -1709,6 +1709,13 @@
|
||||
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.1.tgz#de633db3ec2ef6a3c89e2f19038063e8a122e2c2"
|
||||
integrity sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==
|
||||
|
||||
"@giscus/react@^3.0.0":
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@giscus/react/-/react-3.0.0.tgz#fdadce2c7e4023eb4fdbcc219cdd97f6d7aa17f0"
|
||||
integrity sha512-hgCjLpg3Wgh8VbTF5p8ZLcIHI74wvDk1VIFv12+eKhenNVUDjgwNg2B1aq/3puyHOad47u/ZSyqiMtohjy/OOA==
|
||||
dependencies:
|
||||
giscus "^1.5.0"
|
||||
|
||||
"@hapi/hoek@^9.0.0", "@hapi/hoek@^9.3.0":
|
||||
version "9.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb"
|
||||
@ -1835,6 +1842,18 @@
|
||||
resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz#4fc56c15c580b9adb7dc3c333a134e540b44bfb1"
|
||||
integrity sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==
|
||||
|
||||
"@lit-labs/ssr-dom-shim@^1.2.0":
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/@lit-labs/ssr-dom-shim/-/ssr-dom-shim-1.2.1.tgz#2f3a8f1d688935c704dbc89132394a41029acbb8"
|
||||
integrity sha512-wx4aBmgeGvFmOKucFKY+8VFJSYZxs9poN3SDNQFF6lT6NrQUnHiPB2PWz2sc4ieEcAaYYzN+1uWahEeTq2aRIQ==
|
||||
|
||||
"@lit/reactive-element@^2.0.4":
|
||||
version "2.0.4"
|
||||
resolved "https://registry.yarnpkg.com/@lit/reactive-element/-/reactive-element-2.0.4.tgz#8f2ed950a848016383894a26180ff06c56ae001b"
|
||||
integrity sha512-GFn91inaUa2oHLak8awSIigYz0cU0Payr1rcFsrkf5OJ5eSPxElyZfKh0f2p9FsTiZWXQdWGJeXZICEfXXYSXQ==
|
||||
dependencies:
|
||||
"@lit-labs/ssr-dom-shim" "^1.2.0"
|
||||
|
||||
"@mdx-js/mdx@^3.0.0":
|
||||
version "3.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-3.0.1.tgz#617bd2629ae561fdca1bb88e3badd947f5a82191"
|
||||
@ -2569,7 +2588,7 @@
|
||||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/trusted-types@*":
|
||||
"@types/trusted-types@*", "@types/trusted-types@^2.0.2":
|
||||
version "2.0.7"
|
||||
resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.7.tgz#baccb07a970b91707df3a3e8ba6896c57ead2d11"
|
||||
integrity sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==
|
||||
@ -5549,6 +5568,13 @@ get-symbol-description@^1.0.2:
|
||||
es-errors "^1.3.0"
|
||||
get-intrinsic "^1.2.4"
|
||||
|
||||
giscus@^1.5.0:
|
||||
version "1.5.0"
|
||||
resolved "https://registry.yarnpkg.com/giscus/-/giscus-1.5.0.tgz#8299fa056b2ed31ec8b05d4645871e016982b4b2"
|
||||
integrity sha512-t3LL0qbSO3JXq3uyQeKpF5CegstGfKX/0gI6eDe1cmnI7D56R7j52yLdzw4pdKrg3VnufwCgCM3FDz7G1Qr6lg==
|
||||
dependencies:
|
||||
lit "^3.1.2"
|
||||
|
||||
github-slugger@^1.5.0:
|
||||
version "1.5.0"
|
||||
resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.5.0.tgz#17891bbc73232051474d68bd867a34625c955f7d"
|
||||
@ -6882,6 +6908,31 @@ lines-and-columns@^1.1.6:
|
||||
resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"
|
||||
integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
|
||||
|
||||
lit-element@^4.1.0:
|
||||
version "4.1.1"
|
||||
resolved "https://registry.yarnpkg.com/lit-element/-/lit-element-4.1.1.tgz#07905992815076e388cf6f1faffc7d6866c82007"
|
||||
integrity sha512-HO9Tkkh34QkTeUmEdNYhMT8hzLid7YlMlATSi1q4q17HE5d9mrrEHJ/o8O2D0cMi182zK1F3v7x0PWFjrhXFew==
|
||||
dependencies:
|
||||
"@lit-labs/ssr-dom-shim" "^1.2.0"
|
||||
"@lit/reactive-element" "^2.0.4"
|
||||
lit-html "^3.2.0"
|
||||
|
||||
lit-html@^3.2.0:
|
||||
version "3.2.1"
|
||||
resolved "https://registry.yarnpkg.com/lit-html/-/lit-html-3.2.1.tgz#8fc49e3531ee5947e4d93e8a5aa642ab1649833b"
|
||||
integrity sha512-qI/3lziaPMSKsrwlxH/xMgikhQ0EGOX2ICU73Bi/YHFvz2j/yMCIrw4+puF2IpQ4+upd3EWbvnHM9+PnJn48YA==
|
||||
dependencies:
|
||||
"@types/trusted-types" "^2.0.2"
|
||||
|
||||
lit@^3.1.2:
|
||||
version "3.2.1"
|
||||
resolved "https://registry.yarnpkg.com/lit/-/lit-3.2.1.tgz#d6dd15eac20db3a098e81e2c85f70a751ff55592"
|
||||
integrity sha512-1BBa1E/z0O9ye5fZprPtdqnc0BFzxIxTTOO/tQFmyC/hj1O3jL4TfmLBw0WEwjAokdLwpclkvGgDJwTIh0/22w==
|
||||
dependencies:
|
||||
"@lit/reactive-element" "^2.0.4"
|
||||
lit-element "^4.1.0"
|
||||
lit-html "^3.2.0"
|
||||
|
||||
loader-runner@^4.2.0:
|
||||
version "4.3.0"
|
||||
resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1"
|
||||
|
@@ -118,8 +118,8 @@ class O365BaseLoader(BaseLoader, BaseModel):
            metadata_dict[file.name] = {
                "source": file.web_url,
                "mime_type": file.mime_type,
-               "created": file.created,
-               "modified": file.modified,
+               "created": str(file.created),
+               "modified": str(file.modified),
                "created_by": str(file.created_by),
                "modified_by": str(file.modified_by),
                "description": file.description,
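One plausible reading of the change above: `datetime` objects are not JSON-serializable, while their string form is, so stringifying keeps the metadata safe to serialize downstream. A tiny illustration with made-up values:

```python
import json
from datetime import datetime, timezone

created = datetime(2024, 8, 12, tzinfo=timezone.utc)

# json.dumps({"created": created})  # would raise TypeError: Object of type datetime is not JSON serializable
print(json.dumps({"created": str(created)}))  # {"created": "2024-08-12 00:00:00+00:00"}
```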
@@ -1,3 +1,4 @@
+import warnings
from typing import Iterator, Literal, Optional

from langchain_core.document_loaders import BaseLoader
@@ -48,7 +49,6 @@ class FireCrawlLoader(BaseLoader):
Join the waitlist to turn any web
{'ogUrl': 'https://www.firecrawl.dev/', 'title': 'Home - Firecrawl', 'robots': 'follow, index', 'ogImage': 'https://www.firecrawl.dev/og.png?123', 'ogTitle': 'Firecrawl', 'sitemap': {'lastmod': '2024-08-12T00:28:16.681Z', 'changefreq': 'weekly'}, 'keywords': 'Firecrawl,Markdown,Data,Mendable,Langchain', 'sourceURL': 'https://www.firecrawl.dev/', 'ogSiteName': 'Firecrawl', 'description': 'Firecrawl crawls and converts any website into clean markdown.', 'ogDescription': 'Turn any website into LLM-ready data.', 'pageStatusCode': 200, 'ogLocaleAlternate': []}


Async load:
.. code-block:: python

@ -64,13 +64,169 @@ class FireCrawlLoader(BaseLoader):
|
||||
|
||||
""" # noqa: E501
|
||||
|
||||
def legacy_crawler_options_adapter(self, params: dict) -> dict:
|
||||
use_legacy_options = False
|
||||
legacy_keys = [
|
||||
"includes",
|
||||
"excludes",
|
||||
"allowBackwardCrawling",
|
||||
"allowExternalContentLinks",
|
||||
"pageOptions",
|
||||
]
|
||||
for key in legacy_keys:
|
||||
if params.get(key):
|
||||
use_legacy_options = True
|
||||
break
|
||||
|
||||
if use_legacy_options:
|
||||
warnings.warn(
|
||||
"Deprecated parameters detected. See Firecrawl v1 docs for updates.",
|
||||
DeprecationWarning,
|
||||
)
|
||||
if "includes" in params:
|
||||
if params["includes"] is True:
|
||||
params["includePaths"] = params["includes"]
|
||||
del params["includes"]
|
||||
|
||||
if "excludes" in params:
|
||||
if params["excludes"] is True:
|
||||
params["excludePaths"] = params["excludes"]
|
||||
del params["excludes"]
|
||||
|
||||
if "allowBackwardCrawling" in params:
|
||||
if params["allowBackwardCrawling"] is True:
|
||||
params["allowBackwardLinks"] = params["allowBackwardCrawling"]
|
||||
del params["allowBackwardCrawling"]
|
||||
|
||||
if "allowExternalContentLinks" in params:
|
||||
if params["allowExternalContentLinks"] is True:
|
||||
params["allowExternalLinks"] = params["allowExternalContentLinks"]
|
||||
del params["allowExternalContentLinks"]
|
||||
|
||||
if "pageOptions" in params:
|
||||
if isinstance(params["pageOptions"], dict):
|
||||
params["scrapeOptions"] = self.legacy_scrape_options_adapter(
|
||||
params["pageOptions"]
|
||||
)
|
||||
del params["pageOptions"]
|
||||
|
||||
return params
|
||||
|
||||
def legacy_scrape_options_adapter(self, params: dict) -> dict:
|
||||
use_legacy_options = False
|
||||
formats = ["markdown"]
|
||||
|
||||
if "extractorOptions" in params:
|
||||
if "mode" in params["extractorOptions"]:
|
||||
if (
|
||||
params["extractorOptions"]["mode"] == "llm-extraction"
|
||||
or params["extractorOptions"]["mode"]
|
||||
== "llm-extraction-from-raw-html"
|
||||
or params["extractorOptions"]["mode"]
|
||||
== "llm-extraction-from-markdown"
|
||||
):
|
||||
use_legacy_options = True
|
||||
if "extractionPrompt" in params["extractorOptions"]:
|
||||
if params["extractorOptions"]["extractionPrompt"]:
|
||||
params["prompt"] = params["extractorOptions"][
|
||||
"extractionPrompt"
|
||||
]
|
||||
else:
|
||||
params["prompt"] = params["extractorOptions"].get(
|
||||
"extractionPrompt",
|
||||
"Extract page information based on the schema.",
|
||||
)
|
||||
|
||||
if "extractionSchema" in params["extractorOptions"]:
|
||||
if params["extractorOptions"]["extractionSchema"]:
|
||||
params["schema"] = params["extractorOptions"][
|
||||
"extractionSchema"
|
||||
]
|
||||
|
||||
if "userPrompt" in params["extractorOptions"]:
|
||||
if params["extractorOptions"]["userPrompt"]:
|
||||
params["prompt"] = params["extractorOptions"]["userPrompt"]
|
||||
|
||||
del params["extractorOptions"]
|
||||
|
||||
scrape_keys = [
|
||||
"includeMarkdown",
|
||||
"includeHtml",
|
||||
"includeRawHtml",
|
||||
"includeExtract",
|
||||
"includeLinks",
|
||||
"screenshot",
|
||||
"fullPageScreenshot",
|
||||
"onlyIncludeTags",
|
||||
"removeTags",
|
||||
]
|
||||
for key in scrape_keys:
|
||||
if params.get(key):
|
||||
use_legacy_options = True
|
||||
break
|
||||
|
||||
if use_legacy_options:
|
||||
warnings.warn(
|
||||
"Deprecated parameters detected. See Firecrawl v1 docs for updates.",
|
||||
DeprecationWarning,
|
||||
)
|
||||
if "includeMarkdown" in params:
|
||||
if params["includeMarkdown"] is False:
|
||||
formats.remove("markdown")
|
||||
del params["includeMarkdown"]
|
||||
|
||||
if "includeHtml" in params:
|
||||
if params["includeHtml"] is True:
|
||||
formats.append("html")
|
||||
del params["includeHtml"]
|
||||
|
||||
if "includeRawHtml" in params:
|
||||
if params["includeRawHtml"] is True:
|
||||
formats.append("rawHtml")
|
||||
del params["includeRawHtml"]
|
||||
|
||||
if "includeExtract" in params:
|
||||
if params["includeExtract"] is True:
|
||||
formats.append("extract")
|
||||
del params["includeExtract"]
|
||||
|
||||
if "includeLinks" in params:
|
||||
if params["includeLinks"] is True:
|
||||
formats.append("links")
|
||||
del params["includeLinks"]
|
||||
|
||||
if "screenshot" in params:
|
||||
if params["screenshot"] is True:
|
||||
formats.append("screenshot")
|
||||
del params["screenshot"]
|
||||
|
||||
if "fullPageScreenshot" in params:
|
||||
if params["fullPageScreenshot"] is True:
|
||||
formats.append("screenshot@fullPage")
|
||||
del params["fullPageScreenshot"]
|
||||
|
||||
if "onlyIncludeTags" in params:
|
||||
if params["onlyIncludeTags"] is True:
|
||||
params["includeTags"] = params["onlyIncludeTags"]
|
||||
del params["onlyIncludeTags"]
|
||||
|
||||
if "removeTags" in params:
|
||||
if params["removeTags"] is True:
|
||||
params["excludeTags"] = params["removeTags"]
|
||||
del params["removeTags"]
|
||||
|
||||
if "formats" not in params:
|
||||
params["formats"] = formats
|
||||
|
||||
return params
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
url: str,
|
||||
*,
|
||||
api_key: Optional[str] = None,
|
||||
api_url: Optional[str] = None,
|
||||
mode: Literal["crawl", "scrape"] = "crawl",
|
||||
mode: Literal["crawl", "scrape", "map"] = "crawl",
|
||||
params: Optional[dict] = None,
|
||||
):
|
||||
"""Initialize with API key and url.
|
||||
@ -82,8 +238,9 @@ class FireCrawlLoader(BaseLoader):
|
||||
api_url: The Firecrawl API URL. If not specified will be read from env var
|
||||
FIRECRAWL_API_URL or defaults to https://api.firecrawl.dev.
|
||||
mode: The mode to run the loader in. Default is "crawl".
|
||||
Options include "scrape" (single url) and
|
||||
"crawl" (all accessible sub pages).
|
||||
Options include "scrape" (single url),
|
||||
"crawl" (all accessible sub pages),
|
||||
"map" (returns list of links that are semantically related).
|
||||
params: The parameters to pass to the Firecrawl API.
|
||||
Examples include crawlerOptions.
|
||||
For more details, visit: https://github.com/mendableai/firecrawl-py
|
||||
@ -95,30 +252,58 @@ class FireCrawlLoader(BaseLoader):
|
||||
raise ImportError(
|
||||
"`firecrawl` package not found, please run `pip install firecrawl-py`"
|
||||
)
|
||||
if mode not in ("crawl", "scrape"):
|
||||
if mode not in ("crawl", "scrape", "search", "map"):
|
||||
raise ValueError(
|
||||
f"Unrecognized mode '{mode}'. Expected one of 'crawl', 'scrape'."
|
||||
f"Invalid mode '{mode}'. Allowed: 'crawl', 'scrape', 'search', 'map'."
|
||||
)
|
||||
|
||||
if not url:
|
||||
raise ValueError("Url must be provided")
|
||||
|
||||
api_key = api_key or get_from_env("api_key", "FIRECRAWL_API_KEY")
|
||||
self.firecrawl = FirecrawlApp(api_key=api_key, api_url=api_url)
|
||||
self.url = url
|
||||
self.mode = mode
|
||||
self.params = params
|
||||
self.params = params or {}
|
||||
|
||||
def lazy_load(self) -> Iterator[Document]:
|
||||
if self.mode == "scrape":
|
||||
firecrawl_docs = [self.firecrawl.scrape_url(self.url, params=self.params)]
|
||||
firecrawl_docs = [
|
||||
self.firecrawl.scrape_url(
|
||||
self.url, params=self.legacy_scrape_options_adapter(self.params)
|
||||
)
|
||||
]
|
||||
elif self.mode == "crawl":
|
||||
firecrawl_docs = self.firecrawl.crawl_url(self.url, params=self.params)
|
||||
if not self.url:
|
||||
raise ValueError("URL is required for crawl mode")
|
||||
crawl_response = self.firecrawl.crawl_url(
|
||||
self.url, params=self.legacy_crawler_options_adapter(self.params)
|
||||
)
|
||||
firecrawl_docs = crawl_response.get("data", [])
|
||||
elif self.mode == "map":
|
||||
if not self.url:
|
||||
raise ValueError("URL is required for map mode")
|
||||
firecrawl_docs = self.firecrawl.map_url(self.url, params=self.params)
|
||||
elif self.mode == "search":
|
||||
raise ValueError(
|
||||
"Search mode is not supported in this version, please downgrade."
|
||||
)
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Unrecognized mode '{self.mode}'. Expected one of 'crawl', 'scrape'."
|
||||
f"Invalid mode '{self.mode}'. Allowed: 'crawl', 'scrape', 'map'."
|
||||
)
|
||||
for doc in firecrawl_docs:
|
||||
metadata = doc.get("metadata", {})
|
||||
if (self.params is not None) and self.params.get(
|
||||
"extractorOptions", {}
|
||||
).get("mode") == "llm-extraction":
|
||||
metadata["llm_extraction"] = doc.get("llm_extraction")
|
||||
|
||||
yield Document(page_content=doc.get("markdown", ""), metadata=metadata)
|
||||
if self.mode == "map":
|
||||
page_content = doc
|
||||
metadata = {}
|
||||
else:
|
||||
page_content = (
|
||||
doc.get("markdown") or doc.get("html") or doc.get("rawHtml", "")
|
||||
)
|
||||
metadata = doc.get("metadata", {})
|
||||
if not page_content:
|
||||
continue
|
||||
yield Document(
|
||||
page_content=page_content,
|
||||
metadata=metadata,
|
||||
)
|
||||
|
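As a simplified, standalone restatement of what the scrape-options adapter above does with legacy boolean flags (the input dict is hypothetical, and the real adapter handles more keys):

```python
import warnings

legacy = {"includeHtml": True, "includeRawHtml": False, "screenshot": True}

formats = ["markdown"]
for flag, fmt in [("includeHtml", "html"), ("includeRawHtml", "rawHtml"), ("screenshot", "screenshot")]:
    if legacy.pop(flag, False):
        # Legacy booleans are folded into the v1-style "formats" list.
        warnings.warn(f"'{flag}' is deprecated; use formats instead.", DeprecationWarning)
        formats.append(fmt)

print(formats)  # ['markdown', 'html', 'screenshot']
```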
@@ -430,7 +430,7 @@ class Neo4jGraph(GraphStore):
        try:
            data, _, _ = self._driver.execute_query(
                Query(text=query, timeout=self.timeout),
-               database=self._database,
+               database_=self._database,
                parameters_=params,
            )
            json_data = [r.data() for r in data]
@@ -457,7 +457,7 @@
            ):
                raise
            # fallback to allow implicit transactions
-           with self._driver.session() as session:
+           with self._driver.session(database=self._database) as session:
                data = session.run(Query(text=query, timeout=self.timeout), params)
                json_data = [r.data() for r in data]
                if self.sanitize:
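The fix above passes the target database explicitly on both code paths; a hedged sketch of the same pattern against the `neo4j` driver (URI and credentials are placeholders):

```python
from neo4j import GraphDatabase

driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))

# execute_query takes the database via the keyword-only `database_` parameter...
records, _, _ = driver.execute_query("RETURN 1 AS n", database_="neo4j")

# ...while an explicitly opened session takes it as `database`.
with driver.session(database="neo4j") as session:
    result = session.run("RETURN 1 AS n")
    print(result.single()["n"], records[0]["n"])
```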
@@ -396,7 +396,7 @@ class BedrockBase(BaseModel, ABC):
        """Validate that AWS credentials to and python package exists in environment."""

        # Skip creating new client if passed in constructor
-       if values["client"] is not None:
+       if values.get("client") is not None:
            return values

        try:
@ -1,5 +1,8 @@
|
||||
import inspect
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from io import StringIO
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional
|
||||
@ -7,7 +10,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional
|
||||
if TYPE_CHECKING:
|
||||
from databricks.sdk import WorkspaceClient
|
||||
from databricks.sdk.service.catalog import FunctionInfo
|
||||
from databricks.sdk.service.sql import StatementParameterListItem
|
||||
from databricks.sdk.service.sql import StatementParameterListItem, StatementState
|
||||
|
||||
EXECUTE_FUNCTION_ARG_NAME = "__execution_args__"
|
||||
DEFAULT_EXECUTE_FUNCTION_ARGS = {
|
||||
@ -15,6 +18,9 @@ DEFAULT_EXECUTE_FUNCTION_ARGS = {
|
||||
"row_limit": 100,
|
||||
"byte_limit": 4096,
|
||||
}
|
||||
UC_TOOL_CLIENT_EXECUTION_TIMEOUT = "UC_TOOL_CLIENT_EXECUTION_TIMEOUT"
|
||||
DEFAULT_UC_TOOL_CLIENT_EXECUTION_TIMEOUT = "120"
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def is_scalar(function: "FunctionInfo") -> bool:
|
||||
@ -174,13 +180,42 @@ def execute_function(
|
||||
parameters=parametrized_statement.parameters,
|
||||
**execute_statement_args, # type: ignore
|
||||
)
|
||||
status = response.status
|
||||
assert status is not None, f"Statement execution failed: {response}"
|
||||
if status.state != StatementState.SUCCEEDED:
|
||||
error = status.error
|
||||
if response.status and job_pending(response.status.state) and response.statement_id:
|
||||
statement_id = response.statement_id
|
||||
wait_time = 0
|
||||
retry_cnt = 0
|
||||
client_execution_timeout = int(
|
||||
os.environ.get(
|
||||
UC_TOOL_CLIENT_EXECUTION_TIMEOUT,
|
||||
DEFAULT_UC_TOOL_CLIENT_EXECUTION_TIMEOUT,
|
||||
)
|
||||
)
|
||||
while wait_time < client_execution_timeout:
|
||||
wait = min(2**retry_cnt, client_execution_timeout - wait_time)
|
||||
_logger.debug(
|
||||
f"Retrying {retry_cnt} time to get statement execution "
|
||||
f"status after {wait} seconds."
|
||||
)
|
||||
time.sleep(wait)
|
||||
response = ws.statement_execution.get_statement(statement_id) # type: ignore
|
||||
if response.status is None or not job_pending(response.status.state):
|
||||
break
|
||||
wait_time += wait
|
||||
retry_cnt += 1
|
||||
if response.status and job_pending(response.status.state):
|
||||
return FunctionExecutionResult(
|
||||
error=f"Statement execution is still pending after {wait_time} "
|
||||
"seconds. Please increase the wait_timeout argument for executing "
|
||||
f"the function or increase {UC_TOOL_CLIENT_EXECUTION_TIMEOUT} "
|
||||
"environment variable for increasing retrying time, default is "
|
||||
f"{DEFAULT_UC_TOOL_CLIENT_EXECUTION_TIMEOUT} seconds."
|
||||
)
|
||||
assert response.status is not None, f"Statement execution failed: {response}"
|
||||
if response.status.state != StatementState.SUCCEEDED:
|
||||
error = response.status.error
|
||||
assert (
|
||||
error is not None
|
||||
), "Statement execution failed but no error message was provided."
|
||||
), f"Statement execution failed but no error message was provided: {response}"
|
||||
return FunctionExecutionResult(error=f"{error.error_code}: {error.message}")
|
||||
manifest = response.manifest
|
||||
assert manifest is not None
|
||||
@ -211,3 +246,9 @@ def execute_function(
|
||||
return FunctionExecutionResult(
|
||||
format="CSV", value=csv_buffer.getvalue(), truncated=truncated
|
||||
)
|
||||
|
||||
|
||||
def job_pending(state: Optional["StatementState"]) -> bool:
|
||||
from databricks.sdk.service.sql import StatementState
|
||||
|
||||
return state in (StatementState.PENDING, StatementState.RUNNING)
|
||||
|
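The Databricks retry loop above polls statement status with exponentially growing waits capped by `UC_TOOL_CLIENT_EXECUTION_TIMEOUT`; the same pattern in isolation (the `check` callable stands in for `get_statement`):

```python
import time
from typing import Callable


def poll_until_done(check: Callable[[], bool], timeout_s: int = 120) -> bool:
    """Poll `check()` with exponential backoff until it returns True or timeout_s elapses."""
    waited, attempt = 0, 0
    while waited < timeout_s:
        wait = min(2**attempt, timeout_s - waited)
        time.sleep(wait)
        if check():
            return True
        waited += wait
        attempt += 1
    return False


print(poll_until_done(lambda: True, timeout_s=5))  # True after the first 1-second wait
```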
@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import importlib.metadata
|
||||
import typing
|
||||
import uuid
|
||||
from typing import (
|
||||
@ -18,6 +19,7 @@ from typing import (
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
from packaging.version import Version # this is a lancghain-core dependency
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from cassandra.cluster import Session
|
||||
@ -30,6 +32,7 @@ from langchain_community.utilities.cassandra import SetupMode
|
||||
from langchain_community.vectorstores.utils import maximal_marginal_relevance
|
||||
|
||||
CVST = TypeVar("CVST", bound="Cassandra")
|
||||
MIN_CASSIO_VERSION = Version("0.1.10")
|
||||
|
||||
|
||||
class Cassandra(VectorStore):
|
||||
@ -110,6 +113,15 @@ class Cassandra(VectorStore):
|
||||
"Could not import cassio python package. "
|
||||
"Please install it with `pip install cassio`."
|
||||
)
|
||||
cassio_version = Version(importlib.metadata.version("cassio"))
|
||||
|
||||
if cassio_version is not None and cassio_version < MIN_CASSIO_VERSION:
|
||||
msg = (
|
||||
"Cassio version not supported. Please upgrade cassio "
|
||||
f"to version {MIN_CASSIO_VERSION} or higher."
|
||||
)
|
||||
raise ImportError(msg)
|
||||
|
||||
if not table_name:
|
||||
raise ValueError("Missing required parameter 'table_name'.")
|
||||
self.embedding = embedding
|
||||
@ -143,6 +155,9 @@ class Cassandra(VectorStore):
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
if self.session is None:
|
||||
self.session = self.table.session
|
||||
|
||||
@property
|
||||
def embeddings(self) -> Embeddings:
|
||||
return self.embedding
|
||||
@ -231,6 +246,70 @@ class Cassandra(VectorStore):
|
||||
await self.adelete_by_document_id(document_id)
|
||||
return True
|
||||
|
||||
def delete_by_metadata_filter(
|
||||
self,
|
||||
filter: dict[str, Any],
|
||||
*,
|
||||
batch_size: int = 50,
|
||||
) -> int:
|
||||
"""Delete all documents matching a certain metadata filtering condition.
|
||||
|
||||
This operation does not use the vector embeddings in any way, it simply
|
||||
removes all documents whose metadata match the provided condition.
|
||||
|
||||
Args:
|
||||
filter: Filter on the metadata to apply. The filter cannot be empty.
|
||||
batch_size: amount of deletions per each batch (until exhaustion of
|
||||
the matching documents).
|
||||
|
||||
Returns:
|
||||
A number expressing the amount of deleted documents.
|
||||
"""
|
||||
if not filter:
|
||||
msg = (
|
||||
"Method `delete_by_metadata_filter` does not accept an empty "
|
||||
"filter. Use the `clear()` method if you really want to empty "
|
||||
"the vector store."
|
||||
)
|
||||
raise ValueError(msg)
|
||||
|
||||
return self.table.find_and_delete_entries(
|
||||
metadata=filter,
|
||||
batch_size=batch_size,
|
||||
)
|
||||
|
||||
async def adelete_by_metadata_filter(
|
||||
self,
|
||||
filter: dict[str, Any],
|
||||
*,
|
||||
batch_size: int = 50,
|
||||
) -> int:
|
||||
"""Delete all documents matching a certain metadata filtering condition.
|
||||
|
||||
This operation does not use the vector embeddings in any way, it simply
|
||||
removes all documents whose metadata match the provided condition.
|
||||
|
||||
Args:
|
||||
filter: Filter on the metadata to apply. The filter cannot be empty.
|
||||
batch_size: amount of deletions per each batch (until exhaustion of
|
||||
the matching documents).
|
||||
|
||||
Returns:
|
||||
A number expressing the amount of deleted documents.
|
||||
"""
|
||||
if not filter:
|
||||
msg = (
|
||||
"Method `delete_by_metadata_filter` does not accept an empty "
|
||||
"filter. Use the `clear()` method if you really want to empty "
|
||||
"the vector store."
|
||||
)
|
||||
raise ValueError(msg)
|
||||
|
||||
return await self.table.afind_and_delete_entries(
|
||||
metadata=filter,
|
||||
batch_size=batch_size,
|
||||
)
|
||||
|
||||
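A hedged usage sketch of the new metadata-based deletion (the store construction is left as a placeholder since it needs a live Cassandra/cassio setup):

```python
from langchain_community.vectorstores import Cassandra

# Placeholder setup -- substitute your own embeddings, session and keyspace:
# vstore = Cassandra(embedding=my_embeddings, session=session, keyspace="ks", table_name="docs")

# Delete every stored document whose metadata matches the filter, 50 rows per batch:
# deleted = vstore.delete_by_metadata_filter({"source": "old-import"}, batch_size=50)
# print(f"deleted {deleted} documents")
```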
def add_texts(
|
||||
self,
|
||||
texts: Iterable[str],
|
||||
@ -333,6 +412,180 @@ class Cassandra(VectorStore):
|
||||
await asyncio.gather(*tasks)
|
||||
return ids
|
||||
|
||||
def replace_metadata(
|
||||
self,
|
||||
id_to_metadata: dict[str, dict],
|
||||
*,
|
||||
batch_size: int = 50,
|
||||
) -> None:
|
||||
"""Replace the metadata of documents.
|
||||
|
||||
For each document to update, identified by its ID, the new metadata
|
||||
dictionary completely replaces what is on the store. This includes
|
||||
passing empty metadata `{}` to erase the currently-stored information.
|
||||
|
||||
Args:
|
||||
id_to_metadata: map from the Document IDs to modify to the
|
||||
new metadata for updating.
|
||||
Keys in this dictionary that do not correspond to an existing
|
||||
document will not cause an error, rather will result in new
|
||||
rows being written into the Cassandra table but without an
|
||||
associated vector: hence unreachable through vector search.
|
||||
batch_size: Number of concurrent requests to send to the server.
|
||||
|
||||
Returns:
|
||||
None if the writes succeed (otherwise an error is raised).
|
||||
"""
|
||||
ids_and_metadatas = list(id_to_metadata.items())
|
||||
for i in range(0, len(ids_and_metadatas), batch_size):
|
||||
batch_i_m = ids_and_metadatas[i : i + batch_size]
|
||||
futures = [
|
||||
self.table.put_async(
|
||||
row_id=doc_id,
|
||||
metadata=doc_md,
|
||||
)
|
||||
for doc_id, doc_md in batch_i_m
|
||||
]
|
||||
for future in futures:
|
||||
future.result()
|
||||
return
|
||||
|
||||
async def areplace_metadata(
|
||||
self,
|
||||
id_to_metadata: dict[str, dict],
|
||||
*,
|
||||
concurrency: int = 50,
|
||||
) -> None:
|
||||
"""Replace the metadata of documents.
|
||||
|
||||
For each document to update, identified by its ID, the new metadata
|
||||
dictionary completely replaces what is on the store. This includes
|
||||
passing empty metadata `{}` to erase the currently-stored information.
|
||||
|
||||
Args:
|
||||
id_to_metadata: map from the Document IDs to modify to the
|
||||
new metadata for updating.
|
||||
Keys in this dictionary that do not correspond to an existing
|
||||
document will not cause an error, rather will result in new
|
||||
rows being written into the Cassandra table but without an
|
||||
associated vector: hence unreachable through vector search.
|
||||
concurrency: Number of concurrent queries to the database.
|
||||
Defaults to 50.
|
||||
|
||||
Returns:
|
||||
None if the writes succeed (otherwise an error is raised).
|
||||
"""
|
||||
ids_and_metadatas = list(id_to_metadata.items())
|
||||
|
||||
sem = asyncio.Semaphore(concurrency)
|
||||
|
||||
async def send_concurrently(doc_id: str, doc_md: dict) -> None:
|
||||
async with sem:
|
||||
await self.table.aput(
|
||||
row_id=doc_id,
|
||||
metadata=doc_md,
|
||||
)
|
||||
|
||||
for doc_id, doc_md in ids_and_metadatas:
|
||||
tasks = [asyncio.create_task(send_concurrently(doc_id, doc_md))]
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
return
|
||||
|
||||
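The async variant above bounds concurrency with an `asyncio.Semaphore`; a self-contained sketch of that pattern:

```python
import asyncio


async def send(item: int) -> None:
    await asyncio.sleep(0.01)  # stand-in for a single database write


async def send_all(items: list[int], concurrency: int = 50) -> None:
    sem = asyncio.Semaphore(concurrency)

    async def bounded(item: int) -> None:
        async with sem:  # at most `concurrency` writes in flight at once
            await send(item)

    await asyncio.gather(*(bounded(i) for i in items))


asyncio.run(send_all(list(range(200))))
```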
@staticmethod
|
||||
def _row_to_document(row: Dict[str, Any]) -> Document:
|
||||
return Document(
|
||||
id=row["row_id"],
|
||||
page_content=row["body_blob"],
|
||||
metadata=row["metadata"],
|
||||
)
|
||||
|
||||
def get_by_document_id(self, document_id: str) -> Document | None:
|
||||
"""Get by document ID.
|
||||
|
||||
Args:
|
||||
document_id: the document ID to get.
|
||||
"""
|
||||
row = self.table.get(row_id=document_id)
|
||||
if row is None:
|
||||
return None
|
||||
return self._row_to_document(row=row)
|
||||
|
||||
async def aget_by_document_id(self, document_id: str) -> Document | None:
|
||||
"""Get by document ID.
|
||||
|
||||
Args:
|
||||
document_id: the document ID to get.
|
||||
"""
|
||||
row = await self.table.aget(row_id=document_id)
|
||||
if row is None:
|
||||
return None
|
||||
return self._row_to_document(row=row)
|
||||
|
||||
def metadata_search(
|
||||
self,
|
||||
metadata: dict[str, Any] = {}, # noqa: B006
|
||||
n: int = 5,
|
||||
) -> Iterable[Document]:
|
||||
"""Get documents via a metadata search.
|
||||
|
||||
Args:
|
||||
metadata: the metadata to query for.
|
||||
"""
|
||||
rows = self.table.find_entries(metadata=metadata, n=n)
|
||||
return [self._row_to_document(row=row) for row in rows if row]
|
||||
|
||||
async def ametadata_search(
|
||||
self,
|
||||
metadata: dict[str, Any] = {}, # noqa: B006
|
||||
n: int = 5,
|
||||
) -> Iterable[Document]:
|
||||
"""Get documents via a metadata search.
|
||||
|
||||
Args:
|
||||
metadata: the metadata to query for.
|
||||
"""
|
||||
rows = await self.table.afind_entries(metadata=metadata, n=n)
|
||||
return [self._row_to_document(row=row) for row in rows]
|
||||
|
||||
async def asimilarity_search_with_embedding_id_by_vector(
|
||||
self,
|
||||
embedding: List[float],
|
||||
k: int = 4,
|
||||
filter: Optional[Dict[str, str]] = None,
|
||||
body_search: Optional[Union[str, List[str]]] = None,
|
||||
) -> List[Tuple[Document, List[float], str]]:
|
||||
"""Return docs most similar to embedding vector.
|
||||
|
||||
Args:
|
||||
embedding: Embedding to look up documents similar to.
|
||||
k: Number of Documents to return. Defaults to 4.
|
||||
filter: Filter on the metadata to apply.
|
||||
body_search: Document textual search terms to apply.
|
||||
Only supported by Astra DB at the moment.
|
||||
Returns:
|
||||
List of (Document, embedding, id), the most similar to the query vector.
|
||||
"""
|
||||
kwargs: Dict[str, Any] = {}
|
||||
if filter is not None:
|
||||
kwargs["metadata"] = filter
|
||||
if body_search is not None:
|
||||
kwargs["body_search"] = body_search
|
||||
|
||||
hits = await self.table.aann_search(
|
||||
vector=embedding,
|
||||
n=k,
|
||||
**kwargs,
|
||||
)
|
||||
return [
|
||||
(
|
||||
self._row_to_document(row=hit),
|
||||
hit["vector"],
|
||||
hit["row_id"],
|
||||
)
|
||||
for hit in hits
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def _search_to_documents(
|
||||
hits: Iterable[Dict[str, Any]],
|
||||
@ -341,10 +594,7 @@ class Cassandra(VectorStore):
|
||||
# (1=most relevant), as required by this class' contract.
|
||||
return [
|
||||
(
|
||||
Document(
|
||||
page_content=hit["body_blob"],
|
||||
metadata=hit["metadata"],
|
||||
),
|
||||
Cassandra._row_to_document(row=hit),
|
||||
0.5 + 0.5 * hit["distance"],
|
||||
hit["row_id"],
|
||||
)
|
||||
@ -375,7 +625,6 @@ class Cassandra(VectorStore):
|
||||
kwargs["metadata"] = filter
|
||||
if body_search is not None:
|
||||
kwargs["body_search"] = body_search
|
||||
|
||||
hits = self.table.metric_ann_search(
|
||||
vector=embedding,
|
||||
n=k,
|
||||
@ -712,13 +961,7 @@ class Cassandra(VectorStore):
|
||||
for pf_index, pf_hit in enumerate(prefetch_hits)
|
||||
if pf_index in mmr_chosen_indices
|
||||
]
|
||||
return [
|
||||
Document(
|
||||
page_content=hit["body_blob"],
|
||||
metadata=hit["metadata"],
|
||||
)
|
||||
for hit in mmr_hits
|
||||
]
|
||||
return [Cassandra._row_to_document(row=hit) for hit in mmr_hits]
|
||||
|
||||
def max_marginal_relevance_search_by_vector(
|
||||
self,
|
||||
|
@@ -309,6 +309,14 @@ class Clickhouse(VectorStore):
            **kwargs,
        )
+       # Enable JSON type
+       try:
+           self.client.command("SET allow_experimental_json_type=1")
+       except Exception as _:
+           logger.debug(
+               f"Clickhouse version={self.client.server_version} - "
+               "There is no allow_experimental_json_type parameter."
+           )

        self.client.command("SET allow_experimental_object_type=1")
        if self.config.index_type:
            # Enable index
@ -5,9 +5,10 @@ from __future__ import annotations
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from typing import Any, Iterable, List, Optional, Tuple, Type, cast
|
||||
import warnings
|
||||
from typing import Any, Iterable, List, Optional, Tuple, Type, Union, cast
|
||||
|
||||
import requests
|
||||
from httpx import Response
|
||||
from langchain_core.documents import Document
|
||||
from langchain_core.embeddings import Embeddings
|
||||
from langchain_core.vectorstores import VectorStore
|
||||
@ -49,7 +50,7 @@ class InfinispanVS(VectorStore):
|
||||
embedding=RGBEmbeddings(),
|
||||
output_fields: ["texture", "color"],
|
||||
lambda_key: lambda text,meta: str(meta["_key"]),
|
||||
lambda_content: lambda item: item["color"]})
|
||||
lambda_content: lambda item: item["color"])
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@ -58,13 +59,48 @@ class InfinispanVS(VectorStore):
|
||||
ids: Optional[List[str]] = None,
|
||||
**kwargs: Any,
|
||||
):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
cache_name: str
|
||||
Embeddings cache name. Default "vector"
|
||||
entity_name: str
|
||||
Protobuf entity name for the embeddings. Default "vector"
|
||||
text_field: str
|
||||
Protobuf field name for text. Default "text"
|
||||
vector_field: str
|
||||
Protobuf field name for vector. Default "vector"
|
||||
lambda_content: lambda
|
||||
Lambda returning the content part of an item. Default returns text_field
|
||||
lambda_metadata: lambda
|
||||
Lambda returning the metadata part of an item. Default returns items
|
||||
fields excepts text_field, vector_field, _type
|
||||
output_fields: List[str]
|
||||
List of fields to be returned from item, if None return all fields.
|
||||
Default None
|
||||
kwargs: Any
|
||||
Rest of arguments passed to Infinispan. See docs"""
|
||||
self.ispn = Infinispan(**kwargs)
|
||||
self._configuration = kwargs
|
||||
self._cache_name = str(self._configuration.get("cache_name", "vector"))
|
||||
self._entity_name = str(self._configuration.get("entity_name", "vector"))
|
||||
self._embedding = embedding
|
||||
self._textfield = self._configuration.get("textfield", "text")
|
||||
self._vectorfield = self._configuration.get("vectorfield", "vector")
|
||||
self._textfield = self._configuration.get("textfield", "")
|
||||
if self._textfield == "":
|
||||
self._textfield = self._configuration.get("text_field", "text")
|
||||
else:
|
||||
warnings.warn(
|
||||
"`textfield` is deprecated. Please use `text_field` " "param.",
|
||||
DeprecationWarning,
|
||||
)
|
||||
self._vectorfield = self._configuration.get("vectorfield", "")
|
||||
if self._vectorfield == "":
|
||||
self._vectorfield = self._configuration.get("vector_field", "vector")
|
||||
else:
|
||||
warnings.warn(
|
||||
"`vectorfield` is deprecated. Please use `vector_field` " "param.",
|
||||
DeprecationWarning,
|
||||
)
|
||||
self._to_content = self._configuration.get(
|
||||
"lambda_content", lambda item: self._default_content(item)
|
||||
)
|
||||
@ -121,7 +157,7 @@ repeated float %s = 1;
|
||||
metadata_proto += "}\n"
|
||||
return metadata_proto
|
||||
|
||||
def schema_create(self, proto: str) -> requests.Response:
|
||||
def schema_create(self, proto: str) -> Response:
|
||||
"""Deploy the schema for the vector db
|
||||
Args:
|
||||
proto(str): protobuf schema
|
||||
@ -130,14 +166,14 @@ repeated float %s = 1;
|
||||
"""
|
||||
return self.ispn.schema_post(self._entity_name + ".proto", proto)
|
||||
|
||||
def schema_delete(self) -> requests.Response:
|
||||
def schema_delete(self) -> Response:
|
||||
"""Delete the schema for the vector db
|
||||
Returns:
|
||||
An http Response containing the result of the operation
|
||||
"""
|
||||
return self.ispn.schema_delete(self._entity_name + ".proto")
|
||||
|
||||
def cache_create(self, config: str = "") -> requests.Response:
|
||||
def cache_create(self, config: str = "") -> Response:
|
||||
"""Create the cache for the vector db
|
||||
Args:
|
||||
config(str): configuration of the cache.
|
||||
@ -172,14 +208,14 @@ repeated float %s = 1;
|
||||
)
|
||||
return self.ispn.cache_post(self._cache_name, config)
|
||||
|
||||
def cache_delete(self) -> requests.Response:
|
||||
def cache_delete(self) -> Response:
|
||||
"""Delete the cache for the vector db
|
||||
Returns:
|
||||
An http Response containing the result of the operation
|
||||
"""
|
||||
return self.ispn.cache_delete(self._cache_name)
|
||||
|
||||
def cache_clear(self) -> requests.Response:
|
||||
def cache_clear(self) -> Response:
|
||||
"""Clear the cache for the vector db
|
||||
Returns:
|
||||
An http Response containing the result of the operation
|
||||
@ -193,14 +229,14 @@ repeated float %s = 1;
|
||||
"""
|
||||
return self.ispn.cache_exists(self._cache_name)
|
||||
|
||||
def cache_index_clear(self) -> requests.Response:
|
||||
def cache_index_clear(self) -> Response:
|
||||
"""Clear the index for the vector db
|
||||
Returns:
|
||||
An http Response containing the result of the operation
|
||||
"""
|
||||
return self.ispn.index_clear(self._cache_name)
|
||||
|
||||
def cache_index_reindex(self) -> requests.Response:
|
||||
def cache_index_reindex(self) -> Response:
|
||||
"""Rebuild the for the vector db
|
||||
Returns:
|
||||
An http Response containing the result of the operation
|
||||
@ -325,12 +361,16 @@ repeated float %s = 1;
|
||||
def configure(self, metadata: dict, dimension: int) -> None:
|
||||
schema = self.schema_builder(metadata, dimension)
|
||||
output = self.schema_create(schema)
|
||||
assert output.ok, "Unable to create schema. Already exists? "
|
||||
assert (
|
||||
output.status_code == self.ispn.Codes.OK
|
||||
), "Unable to create schema. Already exists? "
|
||||
"Consider using clear_old=True"
|
||||
assert json.loads(output.text)["error"] is None
|
||||
if not self.cache_exists():
|
||||
output = self.cache_create()
|
||||
assert output.ok, "Unable to create cache. Already exists? "
|
||||
assert (
|
||||
output.status_code == self.ispn.Codes.OK
|
||||
), "Unable to create cache. Already exists? "
|
||||
"Consider using clear_old=True"
|
||||
# Ensure index is clean
|
||||
self.cache_index_clear()
|
||||
@ -350,7 +390,24 @@ repeated float %s = 1;
|
||||
auto_config: Optional[bool] = True,
|
||||
**kwargs: Any,
|
||||
) -> InfinispanVS:
|
||||
"""Return VectorStore initialized from texts and embeddings."""
|
||||
"""Return VectorStore initialized from texts and embeddings.
|
||||
|
||||
In addition to parameters described by the super method, this
|
||||
implementation provides other configuration params if different
|
||||
configuration from default is needed.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
ids : List[str]
|
||||
Additional list of keys associated to the embedding. If not
|
||||
provided UUIDs will be generated
|
||||
clear_old : bool
|
||||
Whether old data must be deleted. Default True
|
||||
auto_config: bool
|
||||
Whether to do a complete server setup (caches,
|
||||
protobuf definition...). Default True
|
||||
kwargs: Any
|
||||
Rest of arguments passed to InfinispanVS. See docs"""
|
||||
infinispanvs = cls(embedding=embedding, ids=ids, **kwargs)
|
||||
if auto_config and len(metadatas or []) > 0:
|
||||
if clear_old:
|
||||
@ -381,20 +438,83 @@ class Infinispan:
|
||||
https://github.com/rigazilla/infinispan-vector#run-infinispan
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs: Any):
|
||||
self._configuration = kwargs
|
||||
self._schema = str(self._configuration.get("schema", "http"))
|
||||
self._host = str(self._configuration.get("hosts", ["127.0.0.1:11222"])[0])
|
||||
self._default_node = self._schema + "://" + self._host
|
||||
self._cache_url = str(self._configuration.get("cache_url", "/rest/v2/caches"))
|
||||
self._schema_url = str(self._configuration.get("cache_url", "/rest/v2/schemas"))
|
||||
self._use_post_for_query = str(
|
||||
self._configuration.get("use_post_for_query", True)
|
||||
)
|
||||
def __init__(
|
||||
self,
|
||||
schema: str = "http",
|
||||
user: str = "",
|
||||
password: str = "",
|
||||
hosts: List[str] = ["127.0.0.1:11222"],
|
||||
cache_url: str = "/rest/v2/caches",
|
||||
schema_url: str = "/rest/v2/schemas",
|
||||
use_post_for_query: bool = True,
|
||||
http2: bool = True,
|
||||
verify: bool = True,
|
||||
**kwargs: Any,
|
||||
):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
schema: str
|
||||
Schema for HTTP request: "http" or "https". Default "http"
|
||||
user, password: str
|
||||
User and password if auth is required. Default None
|
||||
hosts: List[str]
|
||||
List of server addresses. Default ["127.0.0.1:11222"]
|
||||
cache_url: str
|
||||
URL endpoint for cache API. Default "/rest/v2/caches"
|
||||
schema_url: str
|
||||
URL endpoint for schema API. Default "/rest/v2/schemas"
|
||||
use_post_for_query: bool
|
||||
Whether POST method should be used for query. Default True
|
||||
http2: bool
|
||||
Whether HTTP/2 protocol should be used. `pip install "httpx[http2]"` is
|
||||
needed for HTTP/2. Default True
|
||||
verify: bool
|
||||
Whether TLS certificate must be verified. Default True
|
||||
"""

def req_query(
self, query: str, cache_name: str, local: bool = False
) -> requests.Response:
try:
import httpx
except ImportError:
raise ImportError(
"Could not import httpx python package. "
"Please install it with `pip install httpx`"
'or `pip install "httpx[http2]"` if you need HTTP/2.'
)

self.Codes = httpx.codes

self._configuration = kwargs
self._schema = schema
self._user = user
self._password = password
self._host = hosts[0]
self._default_node = self._schema + "://" + self._host
self._cache_url = cache_url
self._schema_url = schema_url
self._use_post_for_query = use_post_for_query
self._http2 = http2
if self._user and self._password:
if self._schema == "http":
auth: Union[Tuple[str, str], httpx.DigestAuth] = httpx.DigestAuth(
username=self._user, password=self._password
)
else:
auth = (self._user, self._password)
self._h2c = httpx.Client(
http2=self._http2,
http1=not self._http2,
auth=auth,
verify=verify,
)
else:
self._h2c = httpx.Client(
http2=self._http2,
http1=not self._http2,
verify=verify,
)

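A standalone sketch of the client-construction pattern in the new `__init__` above: Digest authentication over plain HTTP, a basic-auth tuple over HTTPS, and HTTP/2 enabled when the optional `h2` extra is installed. The host, credentials, and the HEAD probe are placeholders, not part of this commit.

```python
import httpx

user, password = "user", "password"   # placeholder credentials
schema = "http"
use_http2 = True                      # requires `pip install "httpx[http2]"`

if schema == "http":
    # Digest auth avoids sending the password in clear text over plain HTTP
    auth = httpx.DigestAuth(username=user, password=password)
else:
    # over HTTPS a plain (user, password) tuple means basic auth
    auth = (user, password)

client = httpx.Client(
    http2=use_http2,
    http1=not use_http2,
    auth=auth,
    verify=False,  # only for self-signed test certificates
)
response = client.head("http://127.0.0.1:11222/rest/v2/caches", timeout=10.0)
print(response.status_code)
```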
def req_query(self, query: str, cache_name: str, local: bool = False) -> Response:
"""Request a query
Args:
query(str): query requested
@ -409,7 +529,7 @@ class Infinispan:

def _query_post(
self, query_str: str, cache_name: str, local: bool = False
) -> requests.Response:
) -> Response:
api_url = (
self._default_node
+ self._cache_url
@ -420,9 +540,9 @@ class Infinispan:
)
data = {"query": query_str}
data_json = json.dumps(data)
response = requests.post(
response = self._h2c.post(
api_url,
data_json,
content=data_json,
headers={"Content-Type": "application/json"},
timeout=REST_TIMEOUT,
)
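The hunks above replace `requests.post(api_url, data_json, ...)` with `self._h2c.post(api_url, content=data_json, ...)`: unlike `requests`, httpx does not accept the request body as a bare positional argument, so the JSON payload moves to `content=`. A minimal side-by-side sketch (URL and query string are placeholders):

```python
import json

import httpx
import requests

api_url = "http://127.0.0.1:11222/rest/v2/caches/demo?action=search"  # placeholder
data_json = json.dumps({"query": "from demo_type"})
headers = {"Content-Type": "application/json"}

# requests: the second positional argument is the request body (data=)
r1 = requests.post(api_url, data_json, headers=headers, timeout=10)

# httpx: the body must be passed explicitly, e.g. via content= (or json=/data=)
with httpx.Client() as client:
    r2 = client.post(api_url, content=data_json, headers=headers, timeout=10)
```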
@ -430,7 +550,7 @@ class Infinispan:

def _query_get(
self, query_str: str, cache_name: str, local: bool = False
) -> requests.Response:
) -> Response:
api_url = (
self._default_node
+ self._cache_url
@ -441,10 +561,10 @@ class Infinispan:
+ "&local="
+ str(local)
)
response = requests.get(api_url, timeout=REST_TIMEOUT)
response = self._h2c.get(api_url, timeout=REST_TIMEOUT)
return response

def post(self, key: str, data: str, cache_name: str) -> requests.Response:
def post(self, key: str, data: str, cache_name: str) -> Response:
"""Post an entry
Args:
key(str): key of the entry
@ -454,15 +574,15 @@ class Infinispan:
An http Response containing the result of the operation
"""
api_url = self._default_node + self._cache_url + "/" + cache_name + "/" + key
response = requests.post(
response = self._h2c.post(
api_url,
data,
content=data,
headers={"Content-Type": "application/json"},
timeout=REST_TIMEOUT,
)
return response

def put(self, key: str, data: str, cache_name: str) -> requests.Response:
def put(self, key: str, data: str, cache_name: str) -> Response:
"""Put an entry
Args:
key(str): key of the entry
@ -472,15 +592,15 @@ class Infinispan:
An http Response containing the result of the operation
"""
api_url = self._default_node + self._cache_url + "/" + cache_name + "/" + key
response = requests.put(
response = self._h2c.put(
api_url,
data,
content=data,
headers={"Content-Type": "application/json"},
timeout=REST_TIMEOUT,
)
return response

def get(self, key: str, cache_name: str) -> requests.Response:
def get(self, key: str, cache_name: str) -> Response:
"""Get an entry
Args:
key(str): key of the entry
@ -489,12 +609,12 @@ class Infinispan:
An http Response containing the entry or errors
"""
api_url = self._default_node + self._cache_url + "/" + cache_name + "/" + key
response = requests.get(
response = self._h2c.get(
api_url, headers={"Content-Type": "application/json"}, timeout=REST_TIMEOUT
)
return response

def schema_post(self, name: str, proto: str) -> requests.Response:
def schema_post(self, name: str, proto: str) -> Response:
"""Deploy a schema
Args:
name(str): name of the schema. Will be used as a key
@ -503,10 +623,10 @@ class Infinispan:
An http Response containing the result of the operation
"""
api_url = self._default_node + self._schema_url + "/" + name
response = requests.post(api_url, proto, timeout=REST_TIMEOUT)
response = self._h2c.post(api_url, content=proto, timeout=REST_TIMEOUT)
return response

def cache_post(self, name: str, config: str) -> requests.Response:
def cache_post(self, name: str, config: str) -> Response:
"""Create a cache
Args:
name(str): name of the cache.
@ -515,15 +635,15 @@ class Infinispan:
An http Response containing the result of the operation
"""
api_url = self._default_node + self._cache_url + "/" + name
response = requests.post(
response = self._h2c.post(
api_url,
config,
content=config,
headers={"Content-Type": "application/json"},
timeout=REST_TIMEOUT,
)
return response

def schema_delete(self, name: str) -> requests.Response:
def schema_delete(self, name: str) -> Response:
"""Delete a schema
Args:
name(str): name of the schema.
@ -531,10 +651,10 @@ class Infinispan:
An http Response containing the result of the operation
"""
api_url = self._default_node + self._schema_url + "/" + name
response = requests.delete(api_url, timeout=REST_TIMEOUT)
response = self._h2c.delete(api_url, timeout=REST_TIMEOUT)
return response

def cache_delete(self, name: str) -> requests.Response:
def cache_delete(self, name: str) -> Response:
"""Delete a cache
Args:
name(str): name of the cache.
@ -542,10 +662,10 @@ class Infinispan:
An http Response containing the result of the operation
"""
api_url = self._default_node + self._cache_url + "/" + name
response = requests.delete(api_url, timeout=REST_TIMEOUT)
response = self._h2c.delete(api_url, timeout=REST_TIMEOUT)
return response

def cache_clear(self, cache_name: str) -> requests.Response:
def cache_clear(self, cache_name: str) -> Response:
"""Clear a cache
Args:
cache_name(str): name of the cache.
@ -555,7 +675,7 @@ class Infinispan:
api_url = (
self._default_node + self._cache_url + "/" + cache_name + "?action=clear"
)
response = requests.post(api_url, timeout=REST_TIMEOUT)
response = self._h2c.post(api_url, timeout=REST_TIMEOUT)
return response

def cache_exists(self, cache_name: str) -> bool:
@ -570,18 +690,17 @@ class Infinispan:
)
return self.resource_exists(api_url)

@staticmethod
def resource_exists(api_url: str) -> bool:
def resource_exists(self, api_url: str) -> bool:
"""Check if a resource exists
Args:
api_url(str): url of the resource.
Returns:
true if resource exists
"""
response = requests.head(api_url, timeout=REST_TIMEOUT)
return response.ok
response = self._h2c.head(api_url, timeout=REST_TIMEOUT)
return response.status_code == self.Codes.OK

def index_clear(self, cache_name: str) -> requests.Response:
def index_clear(self, cache_name: str) -> Response:
"""Clear an index on a cache
Args:
cache_name(str): name of the cache.
@ -595,9 +714,9 @@ class Infinispan:
+ cache_name
+ "/search/indexes?action=clear"
)
return requests.post(api_url, timeout=REST_TIMEOUT)
return self._h2c.post(api_url, timeout=REST_TIMEOUT)

def index_reindex(self, cache_name: str) -> requests.Response:
def index_reindex(self, cache_name: str) -> Response:
"""Rebuild index on a cache
Args:
cache_name(str): name of the cache.
@ -611,4 +730,4 @@ class Infinispan:
+ cache_name
+ "/search/indexes?action=reindex"
)
return requests.post(api_url, timeout=REST_TIMEOUT)
return self._h2c.post(api_url, timeout=REST_TIMEOUT)

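Putting the refactored helper together, here is a hedged usage sketch of the `Infinispan` REST client as it appears in this diff. The import path, cache name, cache configuration JSON, and stored value are assumptions for illustration; a local server from the docker-compose setup added in this commit is assumed to be running.

```python
# Import path assumed from this module's location in langchain-community.
from langchain_community.vectorstores.infinispanvs import Infinispan

ispn = Infinispan(
    schema="http",
    user="user",              # placeholder credentials
    password="password",
    hosts=["127.0.0.1:11232"],
    http2=True,               # requires `pip install "httpx[http2]"`
    verify=True,
)

cache = "demo_cache"
if not ispn.cache_exists(cache):
    # minimal (placeholder) cache definition
    ispn.cache_post(cache, '{"distributed-cache": {"mode": "SYNC"}}')

ispn.put("key1", '{"_type": "demo", "text": "hello"}', cache)  # placeholder entry
print(ispn.get("key1", cache).status_code)
ispn.cache_delete(cache)
```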
@ -193,6 +193,13 @@ class MyScale(VectorStore):
password=self.config.password,
**kwargs,
)
try:
self.client.command("SET allow_experimental_json_type=1")
except Exception as _:
logger.debug(
f"Clickhouse version={self.client.server_version} - "
"There is no allow_experimental_json_type parameter."
)
self.client.command("SET allow_experimental_object_type=1")
self.client.command(schema_)

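The MyScale hunk above probes the newer ClickHouse setting `allow_experimental_json_type` and falls back to `allow_experimental_object_type` on servers that do not know it. The exact nesting is not visible in this extracted diff, so the sketch below attempts both settings independently; `client` stands for any object exposing a ClickHouse-style `command(sql)` method.

```python
import logging

logger = logging.getLogger(__name__)


def enable_experimental_json(client) -> None:
    """Best-effort enabling of experimental JSON support on a ClickHouse server.

    This mirrors the fallback pattern in the hunk above, but is only a sketch:
    both settings are tried independently, each guarded by its own except block.
    """
    try:
        client.command("SET allow_experimental_json_type=1")
    except Exception:
        logger.debug("allow_experimental_json_type is not available on this server.")
    try:
        client.command("SET allow_experimental_object_type=1")
    except Exception:
        logger.debug("allow_experimental_object_type is not available on this server.")
```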
@ -623,7 +623,7 @@ class Neo4jVector(VectorStore):
params = params or {}
try:
data, _, _ = self._driver.execute_query(
query, database=self._database, parameters_=params
query, database_=self._database, parameters_=params
)
return [r.data() for r in data]
except Neo4jError as e:
@ -646,7 +646,7 @@ class Neo4jVector(VectorStore):
):
raise
# Fallback to allow implicit transactions
with self._driver.session() as session:
with self._driver.session(database=self._database) as session:
data = session.run(Query(text=query), params)
return [r.data() for r in data]

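The Neo4j fix above passes the target database as `database_` (note the trailing underscore) to `Driver.execute_query` and as `database=` when opening the fallback session. A small sketch with placeholder connection details:

```python
from neo4j import GraphDatabase, Query

# placeholder URI and credentials
driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))

# execute_query uses trailing-underscore keywords for its own options
records, _, _ = driver.execute_query(
    "MATCH (n) RETURN count(n) AS n",
    database_="neo4j",
    parameters_={},
)
print(records[0]["n"])

# the implicit-transaction fallback opens a session bound to the same database
with driver.session(database="neo4j") as session:
    data = session.run(Query(text="MATCH (n) RETURN count(n) AS n"))
    print([r.data() for r in data])

driver.close()
```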
1133 libs/community/poetry.lock generated
File diff suppressed because it is too large
@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-community"
version = "0.3.1"
version = "0.3.2"
description = "Community contributed LangChain integrations."
authors = []
license = "MIT"
@ -33,13 +33,13 @@ ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogy

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.6"
langchain = "^0.3.1"
langchain-core = "^0.3.10"
langchain = "^0.3.3"
SQLAlchemy = ">=1.4,<3"
requests = "^2"
PyYAML = ">=5.3"
aiohttp = "^3.8.3"
tenacity = "^8.1.0,!=8.4.0"
tenacity = ">=8.1.0,!=8.4.0,<10"
dataclasses-json = ">= 0.5.7, < 0.7"
pydantic-settings = "^2.4.0"
langsmith = "^0.1.125"

@ -0,0 +1,4 @@
#!/bin/sh

cd infinispan
docker compose up
@ -0,0 +1,2 @@
#Fri May 03 10:19:58 CEST 2024
user=ADMIN,admin
@ -0,0 +1,62 @@
|
||||
<infinispan
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="urn:infinispan:config:15.0 https://infinispan.org/schemas/infinispan-config-15.0.xsd
|
||||
urn:infinispan:server:15.0 https://infinispan.org/schemas/infinispan-server-15.0.xsd"
|
||||
xmlns="urn:infinispan:config:15.0"
|
||||
xmlns:server="urn:infinispan:server:15.0">
|
||||
|
||||
<cache-container name="default" statistics="true">
|
||||
<transport cluster="${infinispan.cluster.name:cluster}" stack="${infinispan.cluster.stack:tcp}" node-name="${infinispan.node.name:}"/>
|
||||
</cache-container>
|
||||
|
||||
<server xmlns="urn:infinispan:server:15.0">
|
||||
<interfaces>
|
||||
<interface name="public">
|
||||
<inet-address value="${infinispan.bind.address:127.0.0.1}"/>
|
||||
</interface>
|
||||
</interfaces>
|
||||
|
||||
<socket-bindings default-interface="public" port-offset="${infinispan.socket.binding.port-offset:0}">
|
||||
<socket-binding name="default" port="${infinispan.bind.port:11222}"/>
|
||||
<socket-binding name="authenticated" port="11232"/>
|
||||
<socket-binding name="auth-tls" port="11242"/>
|
||||
</socket-bindings>
|
||||
|
||||
<security>
|
||||
<credential-stores>
|
||||
<credential-store name="credentials" path="credentials.pfx">
|
||||
<clear-text-credential clear-text="secret"/>
|
||||
</credential-store>
|
||||
</credential-stores>
|
||||
<security-realms>
|
||||
<security-realm name="default">
|
||||
<properties-realm groups-attribute="Roles">
|
||||
<user-properties path="/user-config/users.properties"/>
|
||||
<group-properties path="/user-config/groups.properties"/>
|
||||
</properties-realm>
|
||||
</security-realm>
|
||||
<security-realm name="tls">
|
||||
<!-- Uncomment to enable TLS on the realm -->
|
||||
<server-identities>
|
||||
<ssl>
|
||||
<keystore path="application.keystore"
|
||||
password="password" alias="server"
|
||||
generate-self-signed-certificate-host="localhost"/>
|
||||
|
||||
</ssl>
|
||||
</server-identities>
|
||||
<properties-realm groups-attribute="Roles">
|
||||
<user-properties path="/user-config/users.properties"/>
|
||||
<group-properties path="/user-config/groups.properties"/>
|
||||
</properties-realm>
|
||||
</security-realm>
|
||||
</security-realms>
|
||||
</security>
|
||||
|
||||
<endpoints>
|
||||
<endpoint socket-binding="default"/>
|
||||
<endpoint socket-binding="authenticated" security-realm="default"/>
|
||||
<endpoint socket-binding="auth-tls" security-realm="tls"/>
|
||||
</endpoints>
|
||||
</server>
|
||||
</infinispan>
|
@ -0,0 +1,4 @@
|
||||
#$REALM_NAME=default$
|
||||
#$ALGORITHM=encrypted$
|
||||
#Fri May 03 10:19:58 CEST 2024
|
||||
user=scram-sha-1\:BYGcIAws2gznU/kpezoSb1VQNVd+YMX9r+9SAINFoZtPHaHTAQ\=\=;scram-sha-256\:BYGcIAwRiWiD+8f7dyQEs1Wsum/64MOcjGJ2UcmZFQB6DZJqwRDJ4NrvII4NttmxlA\=\=;scram-sha-384\:BYGcIAz+Eud65N8GWK4TMwhSCZpeE5EFSdynywdryQj3ZwBEgv+KF8hRUuGxiq3EyRxsby6w7DHK3CICGZLsPrM\=;scram-sha-512\:BYGcIAwWxVY9DHn42kHydivyU3s9LSPmyfPPJkIFYyt/XsMASFHGoy5rzk4ahX4HjpJgb+NjdCwhGfi33CY0azUIrn439s62Yg5mq9i+ISto;digest-md5\:AgR1c2VyB2RlZmF1bHSYYyzPjRDR7MhrsdFSK03P;digest-sha\:AgR1c2VyB2RlZmF1bHTga5gDNnNYh7/2HqhBVOdUHjBzhw\=\=;digest-sha-256\:AgR1c2VyB2RlZmF1bHTig5qZQIxqtJBTUp3EMh5UIFoS4qOhz9Uk5aOW9ZKCfw\=\=;digest-sha-384\:AgR1c2VyB2RlZmF1bHT01pAN/pRMLS5afm4Q9S0kuLlA0NokuP8F0AISTwXCb1E8RMsFHlBVPOa5rC6Nyso\=;digest-sha-512\:AgR1c2VyB2RlZmF1bHTi+cHn1Ez2Ze41CvPXb9eP/7JmRys7m1f5qPMQWhAmDOuuUXNWEG4yKSI9k2EZgQvMKTd5hDbR24ul1BsYP8X5;
|
@ -0,0 +1,16 @@
|
||||
version: "3.7"
|
||||
|
||||
services:
|
||||
infinispan:
|
||||
image: quay.io/infinispan/server:15.0
|
||||
ports:
|
||||
- '11222:11222'
|
||||
- '11232:11232'
|
||||
- '11242:11242'
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 25Gb
|
||||
volumes:
|
||||
- ./conf:/user-config
|
||||
command: -c /user-config/infinispan.xml
|
@ -17,6 +17,17 @@ from tests.integration_tests.vectorstores.fake_embeddings import (
)


def _strip_docs(documents: List[Document]) -> List[Document]:
return [_strip_doc(doc) for doc in documents]


def _strip_doc(document: Document) -> Document:
return Document(
page_content=document.page_content,
metadata=document.metadata,
)


def _vectorstore_from_texts(
texts: List[str],
metadatas: Optional[List[dict]] = None,
@ -110,9 +121,9 @@ async def test_cassandra() -> None:
|
||||
texts = ["foo", "bar", "baz"]
|
||||
docsearch = _vectorstore_from_texts(texts)
|
||||
output = docsearch.similarity_search("foo", k=1)
|
||||
assert output == [Document(page_content="foo")]
|
||||
assert _strip_docs(output) == _strip_docs([Document(page_content="foo")])
|
||||
output = await docsearch.asimilarity_search("foo", k=1)
|
||||
assert output == [Document(page_content="foo")]
|
||||
assert _strip_docs(output) == _strip_docs([Document(page_content="foo")])
|
||||
|
||||
|
||||
async def test_cassandra_with_score() -> None:
|
||||
@ -130,13 +141,13 @@ async def test_cassandra_with_score() -> None:
|
||||
output = docsearch.similarity_search_with_score("foo", k=3)
|
||||
docs = [o[0] for o in output]
|
||||
scores = [o[1] for o in output]
|
||||
assert docs == expected_docs
|
||||
assert _strip_docs(docs) == _strip_docs(expected_docs)
|
||||
assert scores[0] > scores[1] > scores[2]
|
||||
|
||||
output = await docsearch.asimilarity_search_with_score("foo", k=3)
|
||||
docs = [o[0] for o in output]
|
||||
scores = [o[1] for o in output]
|
||||
assert docs == expected_docs
|
||||
assert _strip_docs(docs) == _strip_docs(expected_docs)
|
||||
assert scores[0] > scores[1] > scores[2]
|
||||
|
||||
|
||||
@ -239,7 +250,7 @@ async def test_cassandra_no_drop_async() -> None:
|
||||
def test_cassandra_delete() -> None:
|
||||
"""Test delete methods from vector store."""
|
||||
texts = ["foo", "bar", "baz", "gni"]
|
||||
metadatas = [{"page": i} for i in range(len(texts))]
|
||||
metadatas = [{"page": i, "mod2": i % 2} for i in range(len(texts))]
|
||||
docsearch = _vectorstore_from_texts([], metadatas=metadatas)
|
||||
|
||||
ids = docsearch.add_texts(texts, metadatas)
|
||||
@ -263,11 +274,21 @@ def test_cassandra_delete() -> None:
|
||||
output = docsearch.similarity_search("foo", k=10)
|
||||
assert len(output) == 0
|
||||
|
||||
docsearch.add_texts(texts, metadatas)
|
||||
num_deleted = docsearch.delete_by_metadata_filter({"mod2": 0}, batch_size=1)
|
||||
assert num_deleted == 2
|
||||
output = docsearch.similarity_search("foo", k=10)
|
||||
assert len(output) == 2
|
||||
docsearch.clear()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
docsearch.delete_by_metadata_filter({})
|
||||
|
||||
|
||||
async def test_cassandra_adelete() -> None:
|
||||
"""Test delete methods from vector store."""
|
||||
texts = ["foo", "bar", "baz", "gni"]
|
||||
metadatas = [{"page": i} for i in range(len(texts))]
|
||||
metadatas = [{"page": i, "mod2": i % 2} for i in range(len(texts))]
|
||||
docsearch = await _vectorstore_from_texts_async([], metadatas=metadatas)
|
||||
|
||||
ids = await docsearch.aadd_texts(texts, metadatas)
|
||||
@ -291,6 +312,16 @@ async def test_cassandra_adelete() -> None:
|
||||
output = docsearch.similarity_search("foo", k=10)
|
||||
assert len(output) == 0
|
||||
|
||||
await docsearch.aadd_texts(texts, metadatas)
|
||||
num_deleted = await docsearch.adelete_by_metadata_filter({"mod2": 0}, batch_size=1)
|
||||
assert num_deleted == 2
|
||||
output = await docsearch.asimilarity_search("foo", k=10)
|
||||
assert len(output) == 2
|
||||
await docsearch.aclear()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
await docsearch.adelete_by_metadata_filter({})
|
||||
|
||||
|
||||
def test_cassandra_metadata_indexing() -> None:
|
||||
"""Test comparing metadata indexing policies."""
|
||||
@ -316,3 +347,107 @@ def test_cassandra_metadata_indexing() -> None:
|
||||
with pytest.raises(ValueError):
|
||||
# "Non-indexed metadata fields cannot be used in queries."
|
||||
vstore_f1.similarity_search("bar", filter={"field2": "b"}, k=2)
|
||||
|
||||
|
||||
def test_cassandra_replace_metadata() -> None:
|
||||
"""Test of replacing metadata."""
|
||||
N_DOCS = 100
|
||||
REPLACE_RATIO = 2 # one in ... will have replaced metadata
|
||||
BATCH_SIZE = 3
|
||||
|
||||
vstore_f1 = _vectorstore_from_texts(
|
||||
texts=[],
|
||||
metadata_indexing=("allowlist", ["field1", "field2"]),
|
||||
table_name="vector_test_table_indexing",
|
||||
)
|
||||
orig_documents = [
|
||||
Document(
|
||||
page_content=f"doc_{doc_i}",
|
||||
id=f"doc_id_{doc_i}",
|
||||
metadata={"field1": f"f1_{doc_i}", "otherf": "pre"},
|
||||
)
|
||||
for doc_i in range(N_DOCS)
|
||||
]
|
||||
vstore_f1.add_documents(orig_documents)
|
||||
|
||||
ids_to_replace = [
|
||||
f"doc_id_{doc_i}" for doc_i in range(N_DOCS) if doc_i % REPLACE_RATIO == 0
|
||||
]
|
||||
|
||||
# various kinds of replacement at play here:
|
||||
def _make_new_md(mode: int, doc_id: str) -> dict[str, str]:
|
||||
if mode == 0:
|
||||
return {}
|
||||
elif mode == 1:
|
||||
return {"field2": f"NEW_{doc_id}"}
|
||||
elif mode == 2:
|
||||
return {"field2": f"NEW_{doc_id}", "ofherf2": "post"}
|
||||
else:
|
||||
return {"ofherf2": "post"}
|
||||
|
||||
ids_to_new_md = {
|
||||
doc_id: _make_new_md(rep_i % 4, doc_id)
|
||||
for rep_i, doc_id in enumerate(ids_to_replace)
|
||||
}
|
||||
|
||||
vstore_f1.replace_metadata(ids_to_new_md, batch_size=BATCH_SIZE)
|
||||
# thorough check
|
||||
expected_id_to_metadata: dict[str, dict] = {
|
||||
**{(document.id or ""): document.metadata for document in orig_documents},
|
||||
**ids_to_new_md,
|
||||
}
|
||||
for hit in vstore_f1.similarity_search("doc", k=N_DOCS + 1):
|
||||
assert hit.id is not None
|
||||
assert hit.metadata == expected_id_to_metadata[hit.id]
|
||||
|
||||
|
||||
async def test_cassandra_areplace_metadata() -> None:
|
||||
"""Test of replacing metadata."""
|
||||
N_DOCS = 100
|
||||
REPLACE_RATIO = 2 # one in ... will have replaced metadata
|
||||
BATCH_SIZE = 3
|
||||
|
||||
vstore_f1 = _vectorstore_from_texts(
|
||||
texts=[],
|
||||
metadata_indexing=("allowlist", ["field1", "field2"]),
|
||||
table_name="vector_test_table_indexing",
|
||||
)
|
||||
orig_documents = [
|
||||
Document(
|
||||
page_content=f"doc_{doc_i}",
|
||||
id=f"doc_id_{doc_i}",
|
||||
metadata={"field1": f"f1_{doc_i}", "otherf": "pre"},
|
||||
)
|
||||
for doc_i in range(N_DOCS)
|
||||
]
|
||||
await vstore_f1.aadd_documents(orig_documents)
|
||||
|
||||
ids_to_replace = [
|
||||
f"doc_id_{doc_i}" for doc_i in range(N_DOCS) if doc_i % REPLACE_RATIO == 0
|
||||
]
|
||||
|
||||
# various kinds of replacement at play here:
|
||||
def _make_new_md(mode: int, doc_id: str) -> dict[str, str]:
|
||||
if mode == 0:
|
||||
return {}
|
||||
elif mode == 1:
|
||||
return {"field2": f"NEW_{doc_id}"}
|
||||
elif mode == 2:
|
||||
return {"field2": f"NEW_{doc_id}", "ofherf2": "post"}
|
||||
else:
|
||||
return {"ofherf2": "post"}
|
||||
|
||||
ids_to_new_md = {
|
||||
doc_id: _make_new_md(rep_i % 4, doc_id)
|
||||
for rep_i, doc_id in enumerate(ids_to_replace)
|
||||
}
|
||||
|
||||
await vstore_f1.areplace_metadata(ids_to_new_md, concurrency=BATCH_SIZE)
|
||||
# thorough check
|
||||
expected_id_to_metadata: dict[str, dict] = {
|
||||
**{(document.id or ""): document.metadata for document in orig_documents},
|
||||
**ids_to_new_md,
|
||||
}
|
||||
for hit in await vstore_f1.asimilarity_search("doc", k=N_DOCS + 1):
|
||||
assert hit.id is not None
|
||||
assert hit.metadata == expected_id_to_metadata[hit.id]
|
||||
|
@ -1,7 +1,9 @@
"""Test Infinispan functionality."""

import warnings
from typing import Any, List, Optional

import httpx
import pytest
from langchain_core.documents import Document

@ -11,9 +13,18 @@ from tests.integration_tests.vectorstores.fake_embeddings import (
fake_texts,
)

"""
cd tests/integration_tests/vectorstores/docker-compose
./infinispan.sh

def _infinispan_setup_noautoconf() -> None:
ispnvs = InfinispanVS(auto_config=False)
The current Infinispan implementation relies on httpx: run `pip install "httpx[http2]"`
if it is not installed. HTTP/2 is enabled by default; if it is not
wanted, use `pip install "httpx"`.
"""


def _infinispan_setup_noautoconf(**kwargs: Any) -> None:
ispnvs = InfinispanVS(http2=_hasHttp2(), auto_config=False, **kwargs)
ispnvs.cache_delete()
ispnvs.schema_delete()
proto = """
@ -54,64 +65,104 @@ def _infinispanvs_from_texts(
|
||||
ids=ids,
|
||||
clear_old=clear_old,
|
||||
auto_config=auto_config,
|
||||
http2=_hasHttp2(),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
def _hasHttp2() -> bool:
|
||||
try:
|
||||
httpx.Client(http2=True)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
@pytest.mark.parametrize("autoconfig", [False, True])
|
||||
@pytest.mark.parametrize(
|
||||
"conn_opts",
|
||||
[
|
||||
{},
|
||||
{
|
||||
"user": "user",
|
||||
"password": "password",
|
||||
"hosts": ["localhost:11232"],
|
||||
"schema": "http",
|
||||
},
|
||||
{
|
||||
"user": "user",
|
||||
"password": "password",
|
||||
"hosts": ["localhost:11242"],
|
||||
"schema": "https",
|
||||
"verify": False,
|
||||
},
|
||||
],
|
||||
)
|
||||
class TestBasic:
|
||||
def test_infinispan(self, autoconfig: bool) -> None:
|
||||
def test_infinispan(self, autoconfig: bool, conn_opts: dict) -> None:
|
||||
"""Test end to end construction and search."""
|
||||
if not autoconfig:
|
||||
_infinispan_setup_noautoconf()
|
||||
docsearch = _infinispanvs_from_texts(auto_config=autoconfig)
|
||||
_infinispan_setup_noautoconf(**conn_opts)
|
||||
docsearch = _infinispanvs_from_texts(auto_config=autoconfig, **conn_opts)
|
||||
output = docsearch.similarity_search("foo", k=1)
|
||||
assert output == [Document(page_content="foo")]
|
||||
|
||||
def test_infinispan_with_metadata(self, autoconfig: bool) -> None:
|
||||
def test_infinispan_with_auth(self, autoconfig: bool, conn_opts: dict) -> None:
|
||||
"""Test end to end construction and search."""
|
||||
if not autoconfig:
|
||||
_infinispan_setup_noautoconf(**conn_opts)
|
||||
docsearch = _infinispanvs_from_texts(auto_config=autoconfig, **conn_opts)
|
||||
output = docsearch.similarity_search("foo", k=1)
|
||||
assert output == [Document(page_content="foo")]
|
||||
|
||||
def test_infinispan_with_metadata(self, autoconfig: bool, conn_opts: dict) -> None:
|
||||
"""Test with metadata"""
|
||||
if not autoconfig:
|
||||
_infinispan_setup_noautoconf()
|
||||
_infinispan_setup_noautoconf(**conn_opts)
|
||||
meta = []
|
||||
for _ in range(len(fake_texts)):
|
||||
meta.append({"label": "test"})
|
||||
docsearch = _infinispanvs_from_texts(metadatas=meta, auto_config=autoconfig)
|
||||
docsearch = _infinispanvs_from_texts(
|
||||
metadatas=meta, auto_config=autoconfig, **conn_opts
|
||||
)
|
||||
output = docsearch.similarity_search("foo", k=1)
|
||||
assert output == [Document(page_content="foo", metadata={"label": "test"})]
|
||||
|
||||
def test_infinispan_with_metadata_with_output_fields(
|
||||
self, autoconfig: bool
|
||||
self, autoconfig: bool, conn_opts: dict
|
||||
) -> None:
|
||||
"""Test with metadata"""
|
||||
if not autoconfig:
|
||||
_infinispan_setup_noautoconf()
|
||||
_infinispan_setup_noautoconf(**conn_opts)
|
||||
metadatas = [
|
||||
{"page": i, "label": "label" + str(i)} for i in range(len(fake_texts))
|
||||
]
|
||||
c = {"output_fields": ["label", "page", "text"]}
|
||||
docsearch = _infinispanvs_from_texts(
|
||||
metadatas=metadatas, configuration=c, auto_config=autoconfig
|
||||
metadatas=metadatas, configuration=c, auto_config=autoconfig, **conn_opts
|
||||
)
|
||||
output = docsearch.similarity_search("foo", k=1)
|
||||
assert output == [
|
||||
Document(page_content="foo", metadata={"label": "label0", "page": 0})
|
||||
]
|
||||
|
||||
def test_infinispanvs_with_id(self, autoconfig: bool) -> None:
|
||||
def test_infinispanvs_with_id(self, autoconfig: bool, conn_opts: dict) -> None:
|
||||
"""Test with ids"""
|
||||
ids = ["id_" + str(i) for i in range(len(fake_texts))]
|
||||
docsearch = _infinispanvs_from_texts(ids=ids, auto_config=autoconfig)
|
||||
docsearch = _infinispanvs_from_texts(
|
||||
ids=ids, auto_config=autoconfig, **conn_opts
|
||||
)
|
||||
output = docsearch.similarity_search("foo", k=1)
|
||||
assert output == [Document(page_content="foo")]
|
||||
|
||||
def test_infinispan_with_score(self, autoconfig: bool) -> None:
|
||||
def test_infinispan_with_score(self, autoconfig: bool, conn_opts: dict) -> None:
|
||||
"""Test end to end construction and search with scores and IDs."""
|
||||
if not autoconfig:
|
||||
_infinispan_setup_noautoconf()
|
||||
_infinispan_setup_noautoconf(**conn_opts)
|
||||
texts = ["foo", "bar", "baz"]
|
||||
metadatas = [{"page": i} for i in range(len(texts))]
|
||||
docsearch = _infinispanvs_from_texts(
|
||||
metadatas=metadatas, auto_config=autoconfig
|
||||
metadatas=metadatas, auto_config=autoconfig, **conn_opts
|
||||
)
|
||||
output = docsearch.similarity_search_with_score("foo", k=3)
|
||||
docs = [o[0] for o in output]
|
||||
@ -123,14 +174,14 @@ class TestBasic:
|
||||
]
|
||||
assert scores[0] >= scores[1] >= scores[2]
|
||||
|
||||
def test_infinispan_add_texts(self, autoconfig: bool) -> None:
|
||||
def test_infinispan_add_texts(self, autoconfig: bool, conn_opts: dict) -> None:
|
||||
"""Test end to end construction and MRR search."""
|
||||
if not autoconfig:
|
||||
_infinispan_setup_noautoconf()
|
||||
_infinispan_setup_noautoconf(**conn_opts)
|
||||
texts = ["foo", "bar", "baz"]
|
||||
metadatas = [{"page": i} for i in range(len(texts))]
|
||||
docsearch = _infinispanvs_from_texts(
|
||||
metadatas=metadatas, auto_config=autoconfig
|
||||
metadatas=metadatas, auto_config=autoconfig, **conn_opts
|
||||
)
|
||||
|
||||
docsearch.add_texts(texts, metadatas)
|
||||
@ -138,19 +189,22 @@ class TestBasic:
|
||||
output = docsearch.similarity_search("foo", k=10)
|
||||
assert len(output) == 6
|
||||
|
||||
def test_infinispan_no_clear_old(self, autoconfig: bool) -> None:
|
||||
def test_infinispan_no_clear_old(self, autoconfig: bool, conn_opts: dict) -> None:
|
||||
"""Test end to end construction and MRR search."""
|
||||
if not autoconfig:
|
||||
_infinispan_setup_noautoconf()
|
||||
_infinispan_setup_noautoconf(**conn_opts)
|
||||
texts = ["foo", "bar", "baz"]
|
||||
metadatas = [{"page": i} for i in range(len(texts))]
|
||||
docsearch = _infinispanvs_from_texts(
|
||||
metadatas=metadatas, auto_config=autoconfig
|
||||
metadatas=metadatas, auto_config=autoconfig, **conn_opts
|
||||
)
|
||||
del docsearch
|
||||
try:
|
||||
docsearch = _infinispanvs_from_texts(
|
||||
metadatas=metadatas, clear_old=False, auto_config=autoconfig
|
||||
metadatas=metadatas,
|
||||
clear_old=False,
|
||||
auto_config=autoconfig,
|
||||
**conn_opts,
|
||||
)
|
||||
except AssertionError:
|
||||
if autoconfig:
|
||||
@ -159,3 +213,12 @@ class TestBasic:
|
||||
raise
|
||||
output = docsearch.similarity_search("foo", k=10)
|
||||
assert len(output) == 6
|
||||


class TestHttp2:
def test_http2(self) -> None:
try:
httpx.Client(http2=True)
except Exception:
warnings.warn('pip install "httpx[http2]" if you need HTTP/2')
pass

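`_hasHttp2` and `TestHttp2` above both detect whether the optional `h2` dependency is installed by trying to build an HTTP/2 client; the same probe, isolated into a single helper:

```python
import warnings

import httpx


def has_http2() -> bool:
    """Return True when httpx can build an HTTP/2 client, i.e. `h2` is installed."""
    try:
        httpx.Client(http2=True)
        return True
    except Exception:
        warnings.warn('pip install "httpx[http2]" if you need HTTP/2')
        return False


print(has_http2())
```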
@ -248,8 +248,14 @@ class InMemoryRateLimiter(BaseRateLimiter):
if not blocking:
return self._consume()

while not self._consume():  # noqa: ASYNC110
await asyncio.sleep(self.check_every_n_seconds)
while not self._consume():
# This code ignores the ASYNC110 warning which is a false positive in this
# case.
# There is no external actor that can mark that the Event is done
# since the tokens are managed by the rate limiter itself.
# It needs to wake up to re-fill the tokens.
# https://docs.astral.sh/ruff/rules/async-busy-wait/
await asyncio.sleep(self.check_every_n_seconds)  # ruff: noqa: ASYNC110
return True


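The rate-limiter hunk above keeps the busy-wait loop and explains why ruff's ASYNC110 (async-busy-wait) warning is a false positive: nothing external signals token availability, so the coroutine must wake up and re-check. A stripped-down sketch of that pattern, using a hypothetical token bucket rather than the real `InMemoryRateLimiter` internals:

```python
import asyncio
import time


class TinyRateLimiter:
    """Minimal token-bucket sketch of the busy-wait pattern discussed above."""

    def __init__(self, requests_per_second: float = 2.0, check_every_n_seconds: float = 0.05):
        self.requests_per_second = requests_per_second
        self.check_every_n_seconds = check_every_n_seconds
        self._tokens = 1.0
        self._last = time.monotonic()

    def _consume(self) -> bool:
        # Refill tokens based on elapsed time, then try to spend one.
        now = time.monotonic()
        self._tokens = min(1.0, self._tokens + (now - self._last) * self.requests_per_second)
        self._last = now
        if self._tokens >= 1.0:
            self._tokens -= 1.0
            return True
        return False

    async def aacquire(self) -> bool:
        # No external event to await: the limiter itself refills tokens over time,
        # so it has to wake up periodically and re-check (the "busy wait").
        while not self._consume():
            await asyncio.sleep(self.check_every_n_seconds)
        return True


asyncio.run(TinyRateLimiter().aacquire())
```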
404 libs/core/poetry.lock generated
@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "annotated-types"
|
||||
@ -902,13 +902,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema-specifications"
|
||||
version = "2023.12.1"
|
||||
version = "2024.10.1"
|
||||
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
|
||||
{file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
|
||||
{file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"},
|
||||
{file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -1215,13 +1215,13 @@ url = "../text-splitters"
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.1.129"
|
||||
version = "0.1.132"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langsmith-0.1.129-py3-none-any.whl", hash = "sha256:31393fbbb17d6be5b99b9b22d530450094fab23c6c37281a6a6efb2143d05347"},
|
||||
{file = "langsmith-0.1.129.tar.gz", hash = "sha256:6c3ba66471bef41b9f87da247cc0b493268b3f54656f73648a256a205261b6a0"},
|
||||
{file = "langsmith-0.1.132-py3-none-any.whl", hash = "sha256:2320894203675c1c292b818cbecf68b69e47a9f7814d4e950237d1faaafd5dee"},
|
||||
{file = "langsmith-0.1.132.tar.gz", hash = "sha256:007b8fac469138abdba89db931900a26c5d316640e27ff4660d28c92a766aae1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@ -1232,74 +1232,76 @@ pydantic = [
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
]
|
||||
requests = ">=2,<3"
|
||||
requests-toolbelt = ">=1.0.0,<2.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "markupsafe"
|
||||
version = "2.1.5"
|
||||
version = "3.0.1"
|
||||
description = "Safely add untrusted strings to HTML/XML markup."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
|
||||
{file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
|
||||
{file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
|
||||
{file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
|
||||
{file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
|
||||
{file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
|
||||
{file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
|
||||
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-win32.whl", hash = "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97"},
|
||||
{file = "MarkupSafe-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-win32.whl", hash = "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635"},
|
||||
{file = "MarkupSafe-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa"},
|
||||
{file = "MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b"},
|
||||
{file = "MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-win32.whl", hash = "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8"},
|
||||
{file = "MarkupSafe-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b"},
|
||||
{file = "markupsafe-3.0.1.tar.gz", hash = "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -2084,25 +2086,29 @@ files = [

[[package]]
name = "pywin32"
version = "306"
version = "307"
description = "Python for Window Extensions"
optional = false
python-versions = "*"
files = [
{file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
{file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
{file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
{file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
{file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
{file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
{file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
{file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
{file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
{file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
{file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
{file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
{file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
{file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
{file = "pywin32-307-cp310-cp310-win32.whl", hash = "sha256:f8f25d893c1e1ce2d685ef6d0a481e87c6f510d0f3f117932781f412e0eba31b"},
{file = "pywin32-307-cp310-cp310-win_amd64.whl", hash = "sha256:36e650c5e5e6b29b5d317385b02d20803ddbac5d1031e1f88d20d76676dd103d"},
{file = "pywin32-307-cp310-cp310-win_arm64.whl", hash = "sha256:0c12d61e0274e0c62acee79e3e503c312426ddd0e8d4899c626cddc1cafe0ff4"},
{file = "pywin32-307-cp311-cp311-win32.whl", hash = "sha256:fec5d27cc893178fab299de911b8e4d12c5954e1baf83e8a664311e56a272b75"},
{file = "pywin32-307-cp311-cp311-win_amd64.whl", hash = "sha256:987a86971753ed7fdd52a7fb5747aba955b2c7fbbc3d8b76ec850358c1cc28c3"},
{file = "pywin32-307-cp311-cp311-win_arm64.whl", hash = "sha256:fd436897c186a2e693cd0437386ed79f989f4d13d6f353f8787ecbb0ae719398"},
{file = "pywin32-307-cp312-cp312-win32.whl", hash = "sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815"},
{file = "pywin32-307-cp312-cp312-win_amd64.whl", hash = "sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347"},
{file = "pywin32-307-cp312-cp312-win_arm64.whl", hash = "sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2"},
{file = "pywin32-307-cp313-cp313-win32.whl", hash = "sha256:ea4d56e48dc1ab2aa0a5e3c0741ad6e926529510516db7a3b6981a1ae74405e5"},
{file = "pywin32-307-cp313-cp313-win_amd64.whl", hash = "sha256:576d09813eaf4c8168d0bfd66fb7cb3b15a61041cf41598c2db4a4583bf832d2"},
{file = "pywin32-307-cp313-cp313-win_arm64.whl", hash = "sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a"},
{file = "pywin32-307-cp37-cp37m-win32.whl", hash = "sha256:5101472f5180c647d4525a0ed289ec723a26231550dbfd369ec19d5faf60e511"},
{file = "pywin32-307-cp37-cp37m-win_amd64.whl", hash = "sha256:05de55a7c110478dc4b202230e98af5e0720855360d2b31a44bb4e296d795fba"},
{file = "pywin32-307-cp38-cp38-win32.whl", hash = "sha256:13d059fb7f10792542082f5731d5d3d9645320fc38814759313e5ee97c3fac01"},
{file = "pywin32-307-cp38-cp38-win_amd64.whl", hash = "sha256:7e0b2f93769d450a98ac7a31a087e07b126b6d571e8b4386a5762eb85325270b"},
{file = "pywin32-307-cp39-cp39-win32.whl", hash = "sha256:55ee87f2f8c294e72ad9d4261ca423022310a6e79fb314a8ca76ab3f493854c6"},
{file = "pywin32-307-cp39-cp39-win_amd64.whl", hash = "sha256:e9d5202922e74985b037c9ef46778335c102b74b95cec70f629453dbe7235d87"},
]

[[package]]
@ -2339,6 +2345,20 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "requests-toolbelt"
version = "1.0.0"
description = "A utility belt for advanced users of python-requests"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
{file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
]

[package.dependencies]
requests = ">=2.0.1,<3.0.0"

[[package]]
name = "responses"
version = "0.25.3"
@ -2556,104 +2576,110 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (

[[package]]
name = "simsimd"
version = "5.4.3"
version = "5.6.0"
description = "Fastest SIMD-Accelerated Vector Similarity Functions for x86 and Arm"
optional = false
python-versions = "*"
files = [
{file = "simsimd-5.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:83baefb29993842e6877f1df11d70d8524ab0da2a50214c9e804279b3a82330e"},
{file = "simsimd-5.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deacf8310437e18d83d5fc1e6b0212060f65d1a76f6941978637724ac0b4195e"},
{file = "simsimd-5.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a369099fef78322e2ff11cff127d91e0a1a406807f1c68d5354cf53b22a5693a"},
{file = "simsimd-5.4.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0881ce6f96c118567cf110648e93380c1046551be0603634aa31a6e80e28365a"},
{file = "simsimd-5.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96ba2e86a1904d2fdc63a60c19054d65fecf753f0464311be3aa5de06dea99f1"},
{file = "simsimd-5.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:434e0a35d7e9485c058faf15a99c8ff108ccae94c4d7831be9aaf772cc2d6e8a"},
{file = "simsimd-5.4.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:f9460bafcef2d6c484f9971fbcdefa0eed60db3c5f41631ca6125518e0aabd66"},
{file = "simsimd-5.4.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a068f6d30143262646d31f6e7573acf89601ca9f3583a9e4bca2d732057481d9"},
{file = "simsimd-5.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5ca7b165ee928acb19ff129b6d3776a2cb983e31d62fb5480a209e1aa45065be"},
{file = "simsimd-5.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:269c7d32ce9bbc8f99dc63ef1c87faf4d454cbf4aa569d77a4dbbb8fc4542bfb"},
{file = "simsimd-5.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9bc53af558a7a33046091275e760f16421963870b779be1e243bf192793bf63"},
{file = "simsimd-5.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:d6f5e2f53f932e7ddf5a81aeb6bcbb5e8f7a3b46dc0cd5de6c91265859100f54"},
{file = "simsimd-5.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4dc2736276a5ada456a052828e7e4ba4db9bfb6dfb7c9b16ba5f33160876377"},
{file = "simsimd-5.4.3-cp310-cp310-win32.whl", hash = "sha256:fc2c8b1f92d9ab0a73a57282eea3fd8dac79f50517ac76d81a4646e9037b1ffa"},
{file = "simsimd-5.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:d744be6222edd143d777b40fa55de23889c281ae505a3c8d650a19e4266c0518"},
{file = "simsimd-5.4.3-cp310-cp310-win_arm64.whl", hash = "sha256:804f37fd965115cd0a960551c305f68d8fada6bb27b33a67038306bae6d3bc6a"},
{file = "simsimd-5.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:32f6d6336c1cca6129ea7907d64f4fb91eab64fce8bdfa224098003953801ab5"},
{file = "simsimd-5.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:470aa4fbc03d49a6f021985f0bc18a772f9a55f44cb1328052dbbd027412db1e"},
{file = "simsimd-5.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01d4c36f38f925555a1b85b16aa688b69d5df7a8be91b6c81ea1675486cf8242"},
{file = "simsimd-5.4.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b8130b4d82c2eb02c40a05f00e277a5dca290332777a3b011cb9adc6512439"},
{file = "simsimd-5.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd05002f204da0f25d1e5b0c5e3dfa550a8742c272b09db8ecf7799ec21d67d"},
{file = "simsimd-5.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d674cd79d64e87fa772693275c49ce38f9893bf0b4d9fb08e45952ae54ba707b"},
{file = "simsimd-5.4.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c7d00820f4dfca80eb86cd3c70997bf6590f54557a92bee7ff868844a88376e3"},
{file = "simsimd-5.4.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e960d14f77818e18f7629258f8debd08b5ad3cd62dab2ae53fb5ff5b54f7c08c"},
{file = "simsimd-5.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f8af17e36af232014286c854473ad78758616ed7bcb7cf5a5762b8a5b1b074e5"},
{file = "simsimd-5.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:436a17bd78372e86f7d01ecfff16f75c573a44f1037b5e4199f7a2ecc1a31de9"},
{file = "simsimd-5.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:75f8f0fd937c2935e96da41fe4380f3705e1eec0f3433a643ac9384ed689eb9c"},
{file = "simsimd-5.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1c098aa69fd29a53a350a5a06e0da7ff25258bc76d0deccdf87ba3c862e35fe0"},
{file = "simsimd-5.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bbdb970f64cce2496c7bc54463610c9ddaa43615aa73b6fb1bc1f0df5863565"},
{file = "simsimd-5.4.3-cp311-cp311-win32.whl", hash = "sha256:aa58035f67b2c7083cc05a4c49b06b57a0aa15dfbb9608ed1e83ab59b965768f"},
{file = "simsimd-5.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:de14931d37087ccfbd1cc6321ad22ce76fef51fc75c98264ff2537170062d361"},
{file = "simsimd-5.4.3-cp311-cp311-win_arm64.whl", hash = "sha256:d5474b49d97546b55940c27fcd63fce159f5f939cc53983f91bae56cea34dd52"},
{file = "simsimd-5.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31b1fd618dab62c49cca99fd78e4876b511273ead272584b34aeaca5e5ded532"},
{file = "simsimd-5.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:43487d7f9756d5f8de6f778910b0fd8aa236716612ccd3afe50e8d8c50f1c20b"},
{file = "simsimd-5.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ccd196db72fd716664441c6c0f3077f5d0d5ae6c15454a39c23b7fd729fae5d0"},
{file = "simsimd-5.4.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ea3e3dcb0471d9fb2dd8d09a84826e859d84f55fc01ec0b89df4c7b9bc67d96"},
{file = "simsimd-5.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db4b4fb06397d9689e2fab06b822a6432b85da99c92e060836b532ad94be0dea"},
{file = "simsimd-5.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:576ebb0bcfeca4086d12925ab3130167ea2b89021d19c09f9b14cccc0e22c739"},
{file = "simsimd-5.4.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:87c6bffa7275e250ac2d4605cccb66b0652ad033b734d2a7539eabc3eedbd882"},
{file = "simsimd-5.4.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b48ab95d9a4bc8fd9ce9ce137b36e125e64a114fdd352f2f7a057e92dfbe4571"},
{file = "simsimd-5.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f10dca118c1e7a9a89ccf32eb76f8eb09eb963817ee8817379e73ac1c11d55b3"},
{file = "simsimd-5.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba1472565cb786a4f537877fb15cdaaac139369d6eda14d4bfce6f4cf794b72"},
{file = "simsimd-5.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ef92e699883da0f42b2513f4e923480516595a8da883aeae90980c02d8c40958"},
{file = "simsimd-5.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:adc121c840b306a9b84506a02b00dab8f1e3db152b38fb1f72e5796be7d977b2"},
{file = "simsimd-5.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:04381217e516e597ebc9d68e37341e2b32677d8de847041e94654834ec6fc366"},
{file = "simsimd-5.4.3-cp312-cp312-win32.whl", hash = "sha256:b087189a27f925f348b40c34b7e11aaae9161e764a6376614620fb993f41d9fd"},
{file = "simsimd-5.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:f409ce2dc0b6bac7240432b6e7d7f44c9234a60bf7369bced16d2275dcb8afa1"},
{file = "simsimd-5.4.3-cp312-cp312-win_arm64.whl", hash = "sha256:77cfe8260aa86a1fb27f884dc13d1ebb25e423a470e71020ea2037ea8d54c14d"},
{file = "simsimd-5.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38d21af2860d2effb8bb1af344ca64645ec3e2251497b79a2c80ea5582ae6983"},
{file = "simsimd-5.4.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee8e2ef9856e10e36fda82d13275870bd2d73c0a67f90a3120ecfb03c5448ba"},
{file = "simsimd-5.4.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3f7b6abe31ebdc7044787db91cf3deb8c9612620895af95474cc8a1f146e071"},
{file = "simsimd-5.4.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4ec0e3df45f5c130fe92e4df91ded84ffc251f8fa802c62ffdbeef46a8d9a17"},
{file = "simsimd-5.4.3-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:186a5f4bece780a3c6a161867548b35317c20247e5a07cacd33df7254d2baab9"},
{file = "simsimd-5.4.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:82aea3431f2fc025148ed776c290f5b88a16a5c5c857b92a2a9d44e35097b811"},
{file = "simsimd-5.4.3-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:698b62fd3dddd54e05dcb4989d3805cfa22f8112260855f6dcfd8a9272a938d9"},
{file = "simsimd-5.4.3-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:2f1b19db892fcc3ef3fd21e474a819e63f6f1ac5ba3fb916d0776f5cbdae4b16"},
{file = "simsimd-5.4.3-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:b24ab704c3aaac0fe1538b4fac69b3402847206d6735b5d9d476549712c65ec3"},
{file = "simsimd-5.4.3-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2592574bfa52a5e951568927a518ab873d6e1328c27cbddc8cf656f9d2ac4079"},
{file = "simsimd-5.4.3-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:8b7e9495aa97f28f0ac727d23bd469c756583cc9b3288c8673170c83ea2e4195"},
{file = "simsimd-5.4.3-cp37-cp37m-win32.whl", hash = "sha256:f4a5fff030bf1528e3667b257f477a6a8f4e098372b89b24b6e12b7cca863a39"},
{file = "simsimd-5.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:57eab9d511b72ebf3f86f9d639d54a8876a1794047621e1b64adaeb4e5f24d10"},
{file = "simsimd-5.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:82487401d13f08553a0bf5206a723779530581d4e7801fe7091a53389581d36f"},
{file = "simsimd-5.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a57e07fb8c67339c4e34ae760d998e75876952fcada803250772c974b9cd54bc"},
{file = "simsimd-5.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:433c6fcb8fdcfba3c7c81e568e32335768b0f0cae9b5f4aa8805e621ab6ac871"},
{file = "simsimd-5.4.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1289252bd2be60fe1d030d33eb3b73459efb34cdd63685e04c37c5e3e963d4a"},
{file = "simsimd-5.4.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83e58500f5b4eb1e182054c94a91872bf69471bc963355ab13e7f7d5a3d36ae"},
{file = "simsimd-5.4.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95970e5a268eee6eacb9220ed19d60aa8e1751d01af1e76adf0bf614ec58962d"},
{file = "simsimd-5.4.3-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0c334a0ded63acf1aa0a9ea720cae9a186c81eb5354c4b2b09ed57d8cc652215"},
{file = "simsimd-5.4.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:29f0a8a8cc3a867dbeab39203572077db23d338fb01fd48ac8e6c48ab3bf8b38"},
{file = "simsimd-5.4.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ac7c0fb5b70e5fdd13a6b2d08477f302674af7ae53c627e0413273d9afe5a245"},
{file = "simsimd-5.4.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:8d9688a122c8dcb8e22f27676a76377625e3a3b607a2172559cb0d85453364d9"},
{file = "simsimd-5.4.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:aeb450216526ca05d200f7cc59147233f039e24afbc42ab805adb9e540de9852"},
{file = "simsimd-5.4.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:8209f3c1583a3ce1a6503d389383476a51bfe8990ecf81d7d679276d52eb40d4"},
{file = "simsimd-5.4.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a03e984b0941b7555c0f62968b5d5e7b8132bffbb17b6dfb7219876cd95fc6c4"},
{file = "simsimd-5.4.3-cp38-cp38-win32.whl", hash = "sha256:24c5be9c34d85a8e728e2e322884dc39169fddbbccbe47e970a3c2b872d1efee"},
{file = "simsimd-5.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:7d751d4f85a9c3ab6d62b417c5aa6e5972ca6e13425658a01f14632b916d2327"},
{file = "simsimd-5.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ba4ec918b19c578e1039266e1b3fc5792fce3e3e64c8a3afc6cef11c6f6abe4c"},
{file = "simsimd-5.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37b1f392bbe0fecfb9da301ff36dd0b7915e244b2433e822216171d5ee9b53f8"},
{file = "simsimd-5.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ac97eb8a2ba3771e905a28bffb5894516d15b775af22d8b9b55eae3fe3d6673e"},
{file = "simsimd-5.4.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6ac311333239d03b7b6726ebba8025a409ecb7e0eca9301f967fb88c9ec1055"},
{file = "simsimd-5.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44de5372968851de5c972faab9574a9667312dfedc69c626bd47dc1a0d730bf3"},
{file = "simsimd-5.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64bab8107a19ac05067a143039cc03c14b4bd9ee95973fc7f7bae7e34e857eff"},
{file = "simsimd-5.4.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:31f11b649448fb68061b0ff131c93215940b6dff24357ec9763abe80406db133"},
{file = "simsimd-5.4.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:9d6ad1ed348779a07410b37baa0057dbc402c6d5785f86ad8a784cec6c642f36"},
{file = "simsimd-5.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b848f4f7da3c2d9fc654dd4815b7be99161c954d88af4f948f65f6df3c671671"},
{file = "simsimd-5.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:44c731383c92cb165b907fd88c2c3f6b5a13102358cae4eb881950b4faf2f207"},
{file = "simsimd-5.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:adc0b4df3e2739590f1e8e4d2cc12fedb57e8d015430dd6014e7fe5aab0c4bdd"},
{file = "simsimd-5.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ffaef86f0c7d93e40d19773d3c1c23a5facc68ea1e094db98a6541458cb49aef"},
{file = "simsimd-5.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d4774e00d72babac1f7685b9b0e2ff721da312301e8656687e89071d7e4d4b8b"},
{file = "simsimd-5.4.3-cp39-cp39-win32.whl", hash = "sha256:d582b3eff101cc8e920407ef08c97e34e1738e829bc58816c0d402bcb5674183"},
{file = "simsimd-5.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:777809b143ec9f3ff18bd20eb140367a62717c948b907846f9389e6ae3d1b88c"},
{file = "simsimd-5.4.3-cp39-cp39-win_arm64.whl", hash = "sha256:0637cfb2e99f3d8b78eb3d84d3a63b47a413a3a1702314d74ed28b861b0c0e7c"},
{file = "simsimd-5.4.3.tar.gz", hash = "sha256:dc75ae8bc0bff160439f669eb664cf47cc24d1a450c93b6bb53d8eee68794956"},
{file = "simsimd-5.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f8d33b464c417f2c589bd773c58499f6ee0460600e5e98494859dbcfa492d94b"},
{file = "simsimd-5.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c3f544ecc9a9efafcad312a66983c06aa8caeab9032e6f8dd1ce09b09536cfd3"},
{file = "simsimd-5.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7dd541fe429a9af17e46b254305e8be2128cb6171ff6518b78328466d2618a"},
{file = "simsimd-5.6.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:572b761b6ccd1eb0c800c82aefc286ad334d4d2a7d27f28a3b39083135b66e70"},
{file = "simsimd-5.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b644b12aa0705298f4ece17519cbd338a90ae90a8a8bdeb999dc82beaba5c0f"},
{file = "simsimd-5.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c90f9272dec482331b9c25cdf64bda36b15aada64628eec7b5b424362ef5d48f"},
{file = "simsimd-5.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c17d535a59b0cf81256ff7acc05445e0616c0878945421bf2b5f1225b1c95401"},
{file = "simsimd-5.6.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:190af5a63ed12d9ba543cb9db48fcd05cac20fc1ce543fcfdc8d18e0eae3e220"},
{file = "simsimd-5.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5ebe799537d472b9a7711c040b2ad9fbd6774f7eb796114560124312f798246f"},
{file = "simsimd-5.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:62ad65dcbaf13664f1b1d56b1de55d8246e0102464257908cb5357dd87975eb9"},
{file = "simsimd-5.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eaaa6530de3b53bb43dae58603d0bbfcf55a7e0a97a40dda5d995fab9f836473"},
{file = "simsimd-5.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3e9ef5eba301b9c98e580c144630c787de64a880c669d47661710b9dfe611d40"},
{file = "simsimd-5.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:621cfa718146fdda3f9a0288b441d7397128fea32953944c9de856a2c041b790"},
{file = "simsimd-5.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3850c0b4eafa0077a0d409c82b9ce205590826fc2d0e6046b12e32ae87037f52"},
{file = "simsimd-5.6.0-cp310-cp310-win32.whl", hash = "sha256:f29a482b446518425ed1543c7891a5c2f50bfd2a5022f35d8ff1d777c101d877"},
{file = "simsimd-5.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:147733e90d2a76f01d4f13ef4ba39e727a1d78272fee87c816d6ddc9101812b0"},
{file = "simsimd-5.6.0-cp310-cp310-win_arm64.whl", hash = "sha256:e5916b329d2f3063829f9010feb6288fb4f787ec7597edeb2bcb6243cae1f243"},
{file = "simsimd-5.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5bd0148a9ee640d19e58a89c9bee3008e5858059fe29dd51a1351cc1e675dc9c"},
{file = "simsimd-5.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1924fc803fc9765ed00fa4b5d600d924ef3a47b0c1e74511366f581627be3b4e"},
{file = "simsimd-5.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3a35bc907ebd00e446428c8906315043c6906c9c1f09ae73249eb3304799132"},
{file = "simsimd-5.6.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb876608cc0df37143558c4e88f62c11d356b7c60379d442ec5e436cc20523ff"},
{file = "simsimd-5.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bec8872d1fa55aa8aeb795447d8c56bb21fa271f6ec3156c42efce7f823e6345"},
{file = "simsimd-5.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe179e6c76b7a0f6ba2ea6a716a7eb81ca3d4b147abe3552b442bdec23fcd18"},
{file = "simsimd-5.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9de9e24797818cf954b89ff466f737487a45ea41a778109eeb3093ba402d0840"},
{file = "simsimd-5.6.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a02577614d1e70fbaf026d837d26ab663e70ffbf1fe76d56cef32494a495d803"},
{file = "simsimd-5.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d14e953a6a0b85940044689a1cabf4bc6b79cc0647d625ae5ea17265051a4773"},
{file = "simsimd-5.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:b46031cbb79777ca3e7c83c148fba7203f79e4da96550ffa64ac5e0dac06c2f1"},
{file = "simsimd-5.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3a006db5ccce07f1434f71353a62590c70375dd9118d95f8f5bf957520e5e5e4"},
{file = "simsimd-5.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b726dcf2bad0ac3d402fefb0633261ad23f5e11cf0e74298d6fc0efac067234b"},
{file = "simsimd-5.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f96c492ba79cf152aa4328e1db847ac4de855838c9ba72d60c9d08ff5a7dd038"},
{file = "simsimd-5.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7ff56e35b80ffdb224e54f2dfb22a34ba04271d3f4185b7af688e38f695e87c9"},
{file = "simsimd-5.6.0-cp311-cp311-win32.whl", hash = "sha256:d715d1556e994883c88d76a1aa857b46930d03581b6a4d7dadc24eb0cb71851f"},
{file = "simsimd-5.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:7269a7e01454972e5bf51c66b9984489a8629208ce15ef9baa6ca7c7c0e0b5fe"},
{file = "simsimd-5.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:330d8ab961d6a7c2b363340a39a19436a0239c5cea8b86b664fc0d2d65971619"},
{file = "simsimd-5.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0530a75c5646219fe96c5a5873abfe8f9c160be407b5621cfdb5aeedfaa143a8"},
{file = "simsimd-5.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:67b58cbb6d2da4c2e5239cf9e2e66f5707c0a337561cca778b70a3d4dc8b62b0"},
{file = "simsimd-5.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7a8fe31d558145ec777733e80194ea5b03b3b7f278c243567457fdcc5ee89aeb"},
{file = "simsimd-5.6.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3012816771203692eb020530dd28a571515d8241d9f9d73a1e2065f642baed1"},
{file = "simsimd-5.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18bf024f0a2bc0afe6d20c97921d9c07415b9dd27604f28cf2638773faf8b804"},
{file = "simsimd-5.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637779de503b56043ab78aec5476525ab6741ca2990453a658f8f9b3d3e28fba"},
{file = "simsimd-5.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e6c549a33fb373a43c2f669242dad016b5e3bf11c2e94de17ada78efcaa5d6a7"},
{file = "simsimd-5.6.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e8b24711cdeae5ecdd188fb7347b2367a6446a1576fad0ae5da90dafb0b0aefb"},
{file = "simsimd-5.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e435b99a260f25e922620821ea1ec84901c553e72564c05d9e6f214f128caa9"},
{file = "simsimd-5.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:57f75e7ff13485c22fa5bbae82845c309e19d395a904f22c73f2bf9ece2dc88c"},
{file = "simsimd-5.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cee9039134f161ead6727f59bea43b3af32049fa8486340d41e5cbcaae5bdd80"},
{file = "simsimd-5.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9644b50913c2300001e74a67d63ba09efdbd4730e11f8c423c17b7c7695f36dd"},
{file = "simsimd-5.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a510b69fceb87b16592dd7b3f03f428591a3f78a4ce65831f63eb893dfc3231b"},
{file = "simsimd-5.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5cecf017159157f8eca5b7f232268e93f8d48f312c7f856630a3563bc3389fa9"},
{file = "simsimd-5.6.0-cp312-cp312-win32.whl", hash = "sha256:bf137a84f97a3be3f776ba7049d9f3c0b49679c0aa9035655eb9aa44af8019da"},
{file = "simsimd-5.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:03202498adb8a87ca6e1398d9fbfaf0b34d17cf36dd8724b39ad4cae7cda85b5"},
{file = "simsimd-5.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:e641a7904ffd9f185ae0c7c827c6621348742a5cd885e1778ced337cd8866023"},
{file = "simsimd-5.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d43f22e9206a4a85b2e85e3345cd1221e5979cd3fc3cf2724bded231574f92ff"},
{file = "simsimd-5.6.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5efd8344a3a9f8782ff183d0c985d0860079264d866c01d72802b81c893d377a"},
{file = "simsimd-5.6.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a322263ffd99ddb7f92c9be4dca9b01ef66515e61759048d7d172a5f062507d"},
{file = "simsimd-5.6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55618e1a096fb911cf131fb0191d20c3f146c2cfd69e00cc1aab8909ed34ebcc"},
{file = "simsimd-5.6.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4051f778da068e2ee2a401d51b16d96470eb5e6b2a33f2c5a874a263ccec93b9"},
{file = "simsimd-5.6.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:7c3e40be8554125feb0c7cd6aed9483d0278859925aab4b05ba7f5c8790ba8ea"},
{file = "simsimd-5.6.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:fcd59471f39eac654ef26bdde64d8efc229c84e93b5517d2021b2497e939571e"},
{file = "simsimd-5.6.0-cp37-cp37m-musllinux_1_2_armv7l.whl", hash = "sha256:d9e4c3783cc04e9456c8218c2cfaebe36def57497d13d3356b59770beb8074ce"},
{file = "simsimd-5.6.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:7e816b1411b54fbbf517864abde257c02814ab1c7701e9810544f63f76bb2b29"},
{file = "simsimd-5.6.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:9ecbc9c5c0744b87d3d6c222bf99c87cefc2494f82ce9d22deea1aff706fc1a0"},
{file = "simsimd-5.6.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:48c2877eec2e6ed1b5b915356279ad05b60da3ff23c5ec620fe988c3187ba0e1"},
{file = "simsimd-5.6.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:7e273b438be401bffcca2968347937a40485826015615a344a74f364e28649ce"},
{file = "simsimd-5.6.0-cp37-cp37m-win32.whl", hash = "sha256:f1be0f3e257fdf0e36435ba1e4fd76a256521eee075dfd502172c9aca9e1d00b"},
{file = "simsimd-5.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5d9fe4ed43e432c2fe77e89008a68b617b7ab4d2c570a5be96a892df107a7d41"},
{file = "simsimd-5.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f82ef97bea5ba13cbbd09d96197bbeac8e2d86a228393f7a0cdcdcd862fdaeda"},
{file = "simsimd-5.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e7ab834fdf111c3c3a080ec73f37fd532f5e3d56e36431b64ccf24594b698d2"},
{file = "simsimd-5.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3e554138c4019a19a4716f94b2432f4f11e6073c55cdbb41bdd3a86b4709c0e6"},
{file = "simsimd-5.6.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c39a75d1b693fd510ad44a8b4f998164e17d3dff4deffb8af0d0006c0029b9c6"},
{file = "simsimd-5.6.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:727b14f5be78843be759ef01c68b83e2505fe4f8181e8faedba2675d0850857c"},
{file = "simsimd-5.6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38a62ffde6fee42ebeaa4f9200c4b4f1578b29672e506240df4b1ea286d6a983"},
{file = "simsimd-5.6.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:293d3b6ceafb3ce68a64f04134399973acb9533ec888910292fe109fda5affdb"},
{file = "simsimd-5.6.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d5a7aabe15515ffb697dcc7c186537b5feb14b0eb8701f6314a00a0ec4985408"},
{file = "simsimd-5.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab79b8898a5bfaa9b9e7b7e529c153940f1a0ec36e1c2bfe737c1ef09ca4adcd"},
{file = "simsimd-5.6.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:b237f95c6bc934b3f3f5cf7327d449969e75657d47b38ebd6a753fc3ea6f2088"},
{file = "simsimd-5.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4acec868a46a2d1014741515c723f9b8151e33daab9d1160b697ef56402630fe"},
{file = "simsimd-5.6.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:461671e29d1b4ac3994aae2618a7816899a26450d934d73201ee3af217b85a68"},
{file = "simsimd-5.6.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:603230e85e547dea941fc46ed44db087e0f46d3b8800e3f02e149b98c7c1c0d6"},
{file = "simsimd-5.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6df8ffdcd2b5dcb1b160d03a2e8a4d0aca9b8d0906dc091a8d4b94cc0d30d065"},
{file = "simsimd-5.6.0-cp38-cp38-win32.whl", hash = "sha256:6b484af6c1676ba87335a55b4b16a909c50c352b122255031823b3e7da0998ad"},
{file = "simsimd-5.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:e43c4d08076e7c554e7ef23237af31f2c3163fd47f5d5ff198b88268ecc97add"},
{file = "simsimd-5.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:466684eec92f02014d9fe28e2ab106f4dda0ca0875c9f433b4ccc5fcea28ebda"},
{file = "simsimd-5.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:be671a7564b8ff214fc38afe54e0eac21d1624ffe7b1da2419cde9c83ff925ad"},
{file = "simsimd-5.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9fd9cc87de436d6338f93f0783c38b3aea9caa61461a7091bc9d0d9cdd746cb"},
{file = "simsimd-5.6.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72b0c84db6e90210861756e894c66c83371db4a12421f68dd3824303d0e4c56b"},
{file = "simsimd-5.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cd00384da1108fae1f4bbe09c96f799973ef645b99d7a275815ae80b79c82d6"},
{file = "simsimd-5.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:253688e8e87d2940e483dbc6274c13a9ede081d09092587ed7e505ddec605594"},
{file = "simsimd-5.6.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:32d4209bb9416006cebb32579a3bde96828002f6e859cf90101c7c68b23f0cb8"},
{file = "simsimd-5.6.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ae913aa3066fd29bbde9528d4659dd3ee1c5584580abbc5e9a0eb581adab1dc6"},
{file = "simsimd-5.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d8d0dff06cd8fda55d9e1a33980ad5c1bde56ce607e7f0978117bea79ff346bf"},
{file = "simsimd-5.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:9febb6d142815f3bee30b8ab3a66712e69ce327962e30637ca398fa5ce38c909"},
{file = "simsimd-5.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:96a26317343101fea838130736412d83ec63ad229c5627cbc8a21135f2226aae"},
{file = "simsimd-5.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a6ca3aaaceb60d4afb8f8a5b0f0b2ea1b5286631d80bb4adaaf2648a8eebcb29"},
{file = "simsimd-5.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2bf4300f5e84398c7684694bcede78934df42adfb0362c8ac0183082e48085cf"},
{file = "simsimd-5.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9bd8c63cc96fef6bc22e3193290f4dbbd8f073e07572eb2429a8098f33c0320b"},
{file = "simsimd-5.6.0-cp39-cp39-win32.whl", hash = "sha256:eb7937610fe1ec1614533d4d8f0ce621007884e98fc40d38cb404fa3bf0f0e8c"},
{file = "simsimd-5.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:7aab7923937364ce0a3c49a5ffca548f82f768caebbcc0e2a8eacdf18be44302"},
{file = "simsimd-5.6.0-cp39-cp39-win_arm64.whl", hash = "sha256:5b179383b35e35807bdd73328f4b145db6610901c93c4f3d7d0659971fd6ad93"},
{file = "simsimd-5.6.0.tar.gz", hash = "sha256:bb0ecb7d6626cac8153b5c1a1197d77697d70bc8753d763e2503a8fcf0090ef2"},
]

[[package]]
@ -2710,13 +2736,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]

[[package]]
name = "syrupy"
version = "4.7.1"
version = "4.7.2"
description = "Pytest Snapshot Test Utility"
optional = false
python-versions = ">=3.8.1"
files = [
{file = "syrupy-4.7.1-py3-none-any.whl", hash = "sha256:be002267a512a4bedddfae2e026c93df1ea928ae10baadc09640516923376d41"},
{file = "syrupy-4.7.1.tar.gz", hash = "sha256:f9d4485f3f27d0e5df6ed299cac6fa32eb40a441915d988e82be5a4bdda335c8"},
{file = "syrupy-4.7.2-py3-none-any.whl", hash = "sha256:eae7ba6be5aed190237caa93be288e97ca1eec5ca58760e4818972a10c4acc64"},
{file = "syrupy-4.7.2.tar.gz", hash = "sha256:ea45e099f242de1bb53018c238f408a5bb6c82007bc687aefcbeaa0e1c2e935a"},
]

[package.dependencies]
@ -2724,13 +2750,13 @@ pytest = ">=7.0.0,<9.0.0"

[[package]]
name = "tenacity"
version = "8.5.0"
version = "9.0.0"
description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.8"
files = [
{file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"},
{file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"},
{file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
{file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
]

[package.extras]
@ -2849,13 +2875,13 @@ files = [

[[package]]
name = "types-python-dateutil"
version = "2.9.0.20240906"
version = "2.9.0.20241003"
description = "Typing stubs for python-dateutil"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"},
{file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"},
{file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"},
{file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"},
]

[[package]]
@ -3053,4 +3079,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<4.0"
content-hash = "d226029b416404cd24944878e0381319620636d4b4ccdee5646406e025ed79c4"
content-hash = "553cec5042402574196ae21460a8f8915ddcd9c0190196bd9a666ef8c3a112a8"
@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-core"
version = "0.3.9"
version = "0.3.10"
description = "Building applications with LLMs through composability"
authors = []
license = "MIT"
@ -28,7 +28,7 @@ target-version = "py39"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langsmith = "^0.1.125"
tenacity = "^8.1.0,!=8.4.0"
tenacity = ">=8.1.0,!=8.4.0,<10.0.0"
jsonpatch = "^1.33"
PyYAML = ">=5.3"
packaging = ">=23.2,<25"
@ -119,6 +119,7 @@ classmethod-decorators = [ "classmethod", "langchain_core.utils.pydantic.pre_ini
[tool.poetry.group.lint.dependencies]
ruff = "^0.5"


[tool.poetry.group.typing.dependencies]
mypy = ">=1.10,<1.11"
types-pyyaml = "^6.0.12.2"
@ -126,11 +127,13 @@ types-requests = "^2.28.11.5"
types-jinja2 = "^2.11.9"
simsimd = "^5.0.0"


[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
setuptools = "^67.6.1"
grandalf = "^0.8"


[tool.poetry.group.test.dependencies]
pytest = "^7.3.0"
freezegun = "^1.2.2"
@ -149,12 +152,15 @@ python = "<3.12"
version = "^1.26.0"
python = ">=3.12"


[tool.poetry.group.test_integration.dependencies]


[tool.poetry.group.typing.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true


[tool.poetry.group.test.dependencies.langchain-standard-tests]
path = "../standard-tests"
develop = true
File diff suppressed because one or more lines are too long

905 libs/langchain/poetry.lock generated
File diff suppressed because it is too large
@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain"
version = "0.3.2"
version = "0.3.3"
description = "Building applications with LLMs through composability"
authors = []
license = "MIT"
@ -33,7 +33,7 @@ langchain-server = "langchain.server:main"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.8"
langchain-core = "^0.3.10"
langchain-text-splitters = "^0.3.0"
langsmith = "^0.1.17"
pydantic = "^2.7.4"
@ -41,7 +41,7 @@ SQLAlchemy = ">=1.4,<3"
requests = "^2"
PyYAML = ">=5.3"
aiohttp = "^3.8.3"
tenacity = "^8.1.0,!=8.4.0"
tenacity = ">=8.1.0,!=8.4.0,<10"
[[tool.poetry.dependencies.numpy]]
version = "^1"
python = "<3.12"
1359 libs/partners/chroma/poetry.lock generated
File diff suppressed because it is too large
@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-chroma"
version = "0.1.4"
version = "0.1.5"
description = "An integration package connecting Chroma and LangChain"
authors = []
readme = "README.md"
@ -47,7 +47,7 @@ addopts = " --strict-markers --strict-config --durations=5"
markers = [ "requires: mark tests as requiring a specific library", "asyncio: mark tests as requiring asyncio", "compile: mark placeholder test used to compile integration tests without running them",]

[tool.poetry.dependencies.chromadb]
version = ">=0.4.0,<0.6.0,!=0.5.4,!=0.5.5"
version = ">=0.4.0,<0.6.0,!=0.5.4,!=0.5.5,!=0.5.7,!=0.5.9,!=0.5.10,!=0.5.11,!=0.5.12"

[tool.poetry.dependencies.fastapi]
version = ">=0.95.2,<1"
@ -480,6 +480,15 @@ class BaseChatOpenAI(BaseChatModel):
        values = _build_model_kwargs(values, all_required_field_names)
        return values

    @model_validator(mode="before")
    @classmethod
    def validate_temperature(cls, values: Dict[str, Any]) -> Any:
        """Currently o1 models only allow temperature=1."""
        model = values.get("model_name") or values.get("model") or ""
        if model.startswith("o1") and "temperature" not in values:
            values["temperature"] = 1
        return values

    @model_validator(mode="after")
    def validate_environment(self) -> Self:
        """Validate that api key and python package exists in environment."""
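For illustration only (not part of this commit): a minimal sketch of what the new "before" validator means for callers, assuming the published langchain_openai package and an OPENAI_API_KEY set in the environment. The test added in the next hunk exercises the same behaviour.

from langchain_openai import ChatOpenAI

# No temperature supplied: the pre-validator fills in temperature=1 for
# o1-family model names before the rest of validation runs.
llm = ChatOpenAI(model="o1-mini")
assert llm.temperature == 1

# An explicitly supplied temperature is left untouched by this validator.
llm = ChatOpenAI(model="gpt-4o-mini", temperature=0.3)
assert llm.temperature == 0.3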
@ -35,6 +35,13 @@ def test_openai_model_param() -> None:
    assert llm.model_name == "foo"


def test_openai_o1_temperature() -> None:
    llm = ChatOpenAI(model="o1-preview")
    assert llm.temperature == 1
    llm = ChatOpenAI(model_name="o1-mini")  # type: ignore[call-arg]
    assert llm.temperature == 1


def test_function_message_dict_to_function_message() -> None:
    content = json.dumps({"result": "Example #1"})
    name = "test_function"